0. Project Setup¶
0.1 Packages & Device¶
# Torch
import torch
import torch.nn as nn
import torch.optim as optim
import torchvision.transforms as transforms
import torchvision.datasets as datasets
import matplotlib.pyplot as plt
from torch.utils.data import Dataset, DataLoader, Subset, random_split
from tqdm import tqdm
# Augmentation
import albumentations as A
from albumentations.pytorch import ToTensorV2
import scipy.io as sio
# Visualize Result
from sklearn.metrics import (confusion_matrix, accuracy_score,
precision_score, recall_score,
f1_score, roc_auc_score,
roc_curve, auc, precision_recall_curve,
average_precision_score)
from sklearn.metrics import ConfusionMatrixDisplay
from sklearn.preprocessing import label_binarize
# Basic
import numpy as np
import cv2
import os
import time
from typing import List, Tuple, Union
import random
import itertools
# Prefer the GPU when one is visible to PyTorch; otherwise fall back to CPU.
device_name = "cuda" if torch.cuda.is_available() else "cpu"
device = torch.device(device_name)
print(f"Using device: {device_name}")
Using device: cuda
0.2 Global Configurations¶
path_dataset = "./data/SVHN_mat"
1. Data Processing and Augmentation¶
1.1 Download Datasets¶
Define dataset class, retrieve dataset.
class SVHNDataset(Dataset):
    """SVHN digit dataset loaded from a MATLAB ``.mat`` file (32x32 crops)."""

    def __init__(self, mat_file, transform=None):
        """Load images and labels from *mat_file*; remap SVHN label 10 to digit 0."""
        raw = sio.loadmat(mat_file)
        # 'X' is stored as (H, W, C, N); move the sample axis to the front.
        self.images = np.transpose(raw['X'], (3, 0, 1, 2))
        self.labels = raw['y'].flatten()
        # SVHN encodes the digit "0" as class 10 -- normalize classes to 0..9.
        self.labels[self.labels == 10] = 0
        self.transform = transform

    def __len__(self):
        """Number of samples in the dataset."""
        return len(self.labels)

    def __getitem__(self, idx):
        """Return ``(image, label)``; the image is augmented when a transform is set."""
        sample = self.images[idx]
        target = self.labels[idx]
        if self.transform:
            sample = self.transform(image=sample)['image']
        return sample, target

    def get_meanstd(self):
        """Per-channel mean and std of all images scaled to [0, 1]."""
        scaled = self.images.astype(np.float32) / 255.0
        channel_mean = scaled.mean(axis=(0, 1, 2))
        channel_std = scaled.std(axis=(0, 1, 2), ddof=0)
        return channel_mean.tolist(), channel_std.tolist()
1.2 Peek At Data¶
def peek(dataset):
    """Visualize several augmented draws of one random sample from *dataset*."""

    def to_displayable(arr, mean, std):
        """Undo channel normalization and clamp into [0, 1] for imshow."""
        return np.clip(arr * std + mean, 0, 1)

    grid_rows, grid_cols = 1, 6
    fig, axes = plt.subplots(grid_rows, grid_cols, figsize=(6, 6))
    # A single fixed index: with a random transform each __getitem__ call
    # yields a different augmented view of the same image.
    peek_index = random.randint(0, len(dataset) - 1)
    for col in range(grid_cols):
        tensor_img, label = dataset[peek_index]
        hwc = tensor_img.permute(1, 2, 0).numpy()  # CHW tensor -> HWC array
        # NOTE(review): norm_mean / norm_std are module-level globals not
        # defined in this file as shown -- confirm they exist before calling.
        hwc = to_displayable(hwc, norm_mean, norm_std)
        axes[col].imshow(hwc)
        axes[col].set_title(f"Label: {label}")
    plt.tight_layout()
    plt.show()
print(f"Peaking data from training set of index {peak_index}.\nImage Tnesor Size:{train_dataset.__getitem__(peak_index)[0].shape}")
2. Neural Network¶
2.1 Model Structure¶
class SmallVGG(nn.Module):
    """Small VGG-style CNN for 10-class classification of RGB images.

    Three convolutional stages, each ending in a 2x2 max-pool that halves the
    spatial size, followed by a two-layer fully-connected head emitting raw
    logits for 10 classes.

    Args:
        frame_size: input image side length in pixels (default 32). Must be
            divisible by 8, since three poolings shrink it by a factor of 8.
    """

    def __init__(self, frame_size=32):
        super(SmallVGG, self).__init__()
        self.frame_size = frame_size
        self.conv_layers = nn.Sequential(
            nn.Conv2d(3, 8, kernel_size=3, padding=1),
            nn.ReLU(),
            nn.Conv2d(8, 16, kernel_size=3, padding=1),
            nn.ReLU(),
            nn.MaxPool2d(kernel_size=2, stride=2),  # frame_size / 2
            nn.Conv2d(16, 32, kernel_size=3, padding=1),
            nn.ReLU(),
            nn.Conv2d(32, 32, kernel_size=3, padding=1),
            nn.ReLU(),
            nn.MaxPool2d(kernel_size=2, stride=2),  # frame_size / 4
            nn.Conv2d(32, 32, kernel_size=3, padding=1),
            nn.ReLU(),
            nn.Conv2d(32, 32, kernel_size=3, padding=1),
            nn.ReLU(),
            nn.MaxPool2d(kernel_size=2, stride=2),  # frame_size / 8
        )
        # BUGFIX: the flattened feature count is last_conv_channels * (frame_size/8)^2.
        # The original used `frame_size * 4 * 4`, which only matched because
        # frame_size (32) happens to equal the last conv's channel count (32);
        # any other frame_size would make the Linear layer mismatch the conv output.
        flat_features = 32 * (frame_size // 8) ** 2
        self.fc_layers = nn.Sequential(
            nn.Linear(flat_features, 256),
            nn.ReLU(),
            nn.Linear(256, 10)
        )

    def forward(self, x):
        """Return raw class logits of shape ``(batch, 10)``."""
        x = self.conv_layers(x)
        x = x.view(x.size(0), -1)  # flatten everything but the batch dim
        x = self.fc_layers(x)
        return x
2.2 Train and Evaluate Function¶
def train_and_evaluate(model,
                       train_loader,
                       valid_loader,
                       criterion,
                       optimizer,
                       num_epochs=100):
    """Train *model* and record per-epoch mean train/validation losses.

    Args:
        model: network already moved to the module-level ``device``.
        train_loader: DataLoader yielding ``(images, labels)`` training batches.
        valid_loader: DataLoader yielding ``(images, labels)`` validation batches.
        criterion: loss function, e.g. ``nn.CrossEntropyLoss()``.
        optimizer: optimizer bound to ``model.parameters()``.
        num_epochs: number of full passes over the training set.

    Returns:
        ``(train_losses, valid_losses)``: lists with one mean-per-sample loss
        value per epoch.
    """
    # Record losses to plot.
    train_losses = []
    valid_losses = []
    for epoch in range(num_epochs):
        # ---- training pass ----
        model.train()
        running_loss = 0.0
        for images, labels in tqdm(train_loader):
            images, labels = images.to(device), labels.to(device)
            optimizer.zero_grad()
            outputs = model(images)
            loss = criterion(outputs, labels)
            loss.backward()
            optimizer.step()
            # Weight by batch size so the final division yields a per-sample mean.
            running_loss += loss.item() * len(images)
        # BUGFIX: the accumulator is weighted by batch size, so the mean must
        # divide by the number of samples; the original divided by the number
        # of batches (len(train_loader)), inflating values ~batch_size times.
        train_losses.append(running_loss / len(train_loader.dataset))
        # ---- validation pass ----
        model.eval()
        valid_loss = 0.0
        with torch.no_grad():
            for images, labels in valid_loader:
                images, labels = images.to(device), labels.to(device)
                outputs = model(images)
                loss = criterion(outputs, labels)
                valid_loss += loss.item() * len(images)
        valid_losses.append(valid_loss / len(valid_loader.dataset))
        print(f"Epoch[{epoch+1}/{num_epochs}], Train Loss:{train_losses[-1]:.4f}, Validation Loss:{valid_losses[-1]:.4f}")
    return train_losses, valid_losses
2.3 Get Predictions¶
Multiple functions are defined to evaluate data. Below is a list of them.
def get_predictions(model_path, extra_loader):
    """Run a saved SmallVGG over *extra_loader* and collect predictions.

    Args:
        model_path: path to a saved state_dict, or an already-loaded
            state_dict object.
        extra_loader: DataLoader yielding ``(images, labels)`` batches.

    Returns:
        ``(pred_scores, true_labels, pred_labels)``: softmax probabilities per
        class, ground-truth labels, and argmax class per sample.
    """
    if isinstance(model_path, str):
        # BUGFIX: map_location lets checkpoints saved on GPU load on
        # CPU-only machines instead of raising.
        model_state = torch.load(model_path, map_location=device)
    else:
        model_state = model_path
    model = SmallVGG()
    model.load_state_dict(model_state)
    model.to(device)
    model.eval()
    pred_scores = []  # per-class softmax probabilities
    true_labels = []  # ground truth
    pred_labels = []  # predicted label, i.e. argmax over class scores
    with torch.no_grad():
        for images, labels in tqdm(extra_loader):
            images, labels = images.to(device), labels.to(device)
            outputs = model(images)
            pred_scores_batch = nn.functional.softmax(outputs, dim=-1)
            pred_scores.extend(pred_scores_batch.cpu().tolist())
            pred_labels.extend(outputs.argmax(dim=1).tolist())
            true_labels.extend(labels.cpu().tolist())
    return pred_scores, true_labels, pred_labels
2.4 Get Metrics¶
def get_metrics(true_labels, pred_labels):
    """Return (accuracy, per-class precision, recall, F1) over classes 0-9.

    NOTE(review): precision/recall use zero_division=1 while F1 uses 0 --
    preserved as-is; confirm the asymmetry is intentional.
    """
    class_labels = range(0, 10)
    accuracy = accuracy_score(true_labels, pred_labels)
    precision = precision_score(true_labels, pred_labels, zero_division=1,
                                average=None, labels=class_labels)
    recall = recall_score(true_labels, pred_labels, zero_division=1,
                          average=None, labels=class_labels)
    f1 = f1_score(true_labels, pred_labels, zero_division=0,
                  average=None, labels=class_labels)
    return accuracy, precision, recall, f1
def display_cm(true_labels, pred_labels):
    """Plot a single confusion matrix for digit classes 0-9."""
    matrix = confusion_matrix(true_labels, pred_labels)
    display = ConfusionMatrixDisplay(confusion_matrix=matrix,
                                     display_labels=range(0, 10))
    display.plot(cmap=plt.cm.Blues)
    plt.show()
display_cm(true_labels_cpu, pred_labels_cpu)
--------------------------------------------------------------------------- NameError Traceback (most recent call last) Cell In[117], line 1 ----> 1 display_cm(true_labels_cpu, pred_labels_cpu) NameError: name 'true_labels_cpu' is not defined
# Compute ROC AUC for each class
def get_roc_auc(true_labels_bin, pred_labels_bin):
roc_auc = dict()
for i in range(0, 10):
roc_auc[i] = roc_auc_score(true_labels_bin[:,i], np.array(pred_scores)[:, i])
return roc_auc
3. Experiments¶
3.0 Preparation¶
3.0.1 Plot Functions¶
The experiments will be a list of the following structures:
{
"HYPER_PARAM_1": combo[0],
"HYPER_PARAM_2": combo[1],
"train_losses": train_losses,
"test_losses": test_losses,
"model_state_dict": exp_model.state_dict()
}
Epoch-Loss Curves¶
def plot_el(loaded_experiments, hyper_param_names, n_rows=4, n_cols=4):
    """Plot one train/validation epoch-loss curve per experiment in a grid."""
    grid_size = (n_cols * 5, n_rows * 5)
    key_a, key_b = hyper_param_names
    fig, axes = plt.subplots(nrows=n_rows, ncols=n_cols, figsize=grid_size)
    for idx, ax in enumerate(axes.flat):
        exp = loaded_experiments[idx]
        tr = exp["train_losses"]
        va = exp["test_losses"]
        ax.plot(tr, label=f"TRL, min={np.min(tr):.3f}")
        ax.plot(va, label=f"VAL, min={np.min(va):.3f} at step={np.argmin(va)}")
        ax.set_xlabel("Epochs")
        ax.set_ylabel("Loss")
        ax.set_title(f"{key_a}={exp[key_a]}, {key_b}={exp[key_b]}")
        ax.legend(loc="upper right")
    plt.show()
Get Experiment Results¶
def get_experiment_results(loaded_experiments, test_hyperparam_names, extra_loader):
    """Evaluate every saved experiment model on *extra_loader*.

    Returns a list of dicts carrying the two hyper-parameter values plus the
    collected true labels, predicted labels, and prediction scores.
    """
    key_a, key_b = test_hyperparam_names
    experiment_results = []
    for idx, exp in enumerate(loaded_experiments):
        pred_scores, true_labels, pred_labels = get_predictions(
            exp['model_state_dict'], extra_loader)
        experiment_results.append({
            key_a: exp[key_a],
            key_b: exp[key_b],
            "true_labels": true_labels,
            "pred_labels": pred_labels,
            "pred_scores": pred_scores
        })
        # Sanity peek at the first few predictions of this experiment.
        print(f"First 10 true labels:")
        [print(num, end=" ") for num in true_labels[:10]]
        print(f"...\n")
        print(f"First 10 pred labels:")
        [print(num, end=" ") for num in pred_labels[:10]]
        print(f"...\n")
        print(f"First 5 pred_scores:")
        [print(num, end=" ") for num in pred_scores[:5]]
        print(f"...\n")
        torch.cuda.empty_cache()
    return experiment_results
Confusion Matrix¶
def plot_cm(experiment_results, hyper_param_names, n_rows=4, n_cols=4):
    """Draw one confusion matrix per experiment in an n_rows x n_cols grid."""
    fig, axes = plt.subplots(n_rows, n_cols, figsize=(n_cols * 5, n_rows * 5))
    axes = axes.flatten()
    key_a, key_b = hyper_param_names
    for idx, exp_rs in enumerate(experiment_results):
        truth = exp_rs['true_labels']
        preds = exp_rs['pred_labels']
        matrix = confusion_matrix(truth, preds)
        display = ConfusionMatrixDisplay(confusion_matrix=matrix,
                                         display_labels=range(0, 10))
        display.plot(ax=axes[idx], cmap=plt.cm.Blues)
        axes[idx].set_title(f"Exp {idx+1}: {key_a}={exp_rs[key_a]}, {key_b}={exp_rs[key_b]}")
    plt.tight_layout()
    plt.show()
Precision-Recall Curve¶
def plot_pr(experiment_results, hyper_param_names, n_rows=4, n_cols=4):
    """Plot per-class precision-recall curves for each experiment.

    Returns:
        ``(accuracies, f1_scores)``: overall accuracy and the per-class F1
        array for every experiment, in order.
    """
    fig, axes = plt.subplots(n_rows, n_cols, figsize=(n_cols * 5, n_rows * 5))
    axes = axes.flatten()
    key_a, key_b = hyper_param_names
    accuracies = []
    f1_scores = []
    for idx, exp_rs in enumerate(experiment_results):
        truth = exp_rs['true_labels']
        preds = exp_rs['pred_labels']
        # Hoisted out of the inner class loop (output-identical).
        score_matrix = np.array(exp_rs['pred_scores'])
        truth_bin = label_binarize(truth, classes=range(0, 10))
        preds_bin = label_binarize(preds, classes=range(0, 10))
        accuracy, precision, recall, f1 = get_metrics(truth, preds)
        accuracies.append(accuracy)
        f1_scores.append(f1)
        ax = axes[idx]
        for cls in range(0, 10):
            prec_c, rec_c, _ = precision_recall_curve(truth_bin[:, cls], score_matrix[:, cls])
            ap = average_precision_score(truth_bin[:, cls], score_matrix[:, cls])
            ax.step(rec_c, prec_c, where="post", label=f"Class {cls} AP={ap:.2f}")
        ax.set_title(f"PR-Curve {key_a}={exp_rs[key_a]}, {key_b}={exp_rs[key_b]}")
        ax.legend()
        ax.set_xlabel("Recall")
        ax.set_ylabel("Precision")
    plt.tight_layout()
    plt.show()
    return accuracies, f1_scores
ROC-AUC Curve¶
def plot_rocauc(experiment_results, hyper_param_names, curve_type, n_rows=4, n_cols=4):
    """Plot ROC curves for every experiment in a grid.

    Args:
        experiment_results: list of dicts with 'true_labels' and 'pred_scores'.
        hyper_param_names: two keys used to build each subplot's title.
        curve_type: "all" draws one curve per class; "macro_micro" draws only
            the macro- and micro-averaged curves.
        n_rows, n_cols: subplot grid layout.
    """
    fig, axes = plt.subplots(n_rows, n_cols, figsize=(n_cols * 5, n_rows * 5))
    axes = axes.flatten()
    hparam_1, hparam_2 = hyper_param_names
    for i, exp_rs in enumerate(experiment_results):
        true_labels, pred_scores = exp_rs['true_labels'], exp_rs['pred_scores']
        # One-vs-rest binarization so per-class ROC curves can be computed.
        true_labels_bin = label_binarize(true_labels, classes=range(0, 10))
        # All Classes' ROC curve & ROC Area Under Curve
        fpr = dict()
        tpr = dict()
        roc_auc = dict()
        for j in range(10):
            fpr[j], tpr[j], _ = roc_curve(true_labels_bin[:, j], np.array(pred_scores)[:, j])
            roc_auc[j] = auc(fpr[j], tpr[j])
        # Macro-Average ROC & ROC-AUC: interpolate every class's TPR onto a
        # shared FPR grid, then average the interpolated TPRs over classes.
        all_fpr = np.unique(np.concatenate([fpr[j] for j in range(10)]))
        mean_tpr = np.zeros_like(all_fpr)
        for j in range(10):
            mean_tpr += np.interp(all_fpr, fpr[j], tpr[j])
        mean_tpr /= 10
        fpr["macro"] = all_fpr
        tpr["macro"] = mean_tpr
        roc_auc["macro"] = auc(fpr["macro"], tpr["macro"])
        # Compute micro-average ROC curve and ROC area: pool all per-class
        # decisions into one flat binary problem via ravel().
        fpr["micro"], tpr["micro"], _ = roc_curve(true_labels_bin.ravel(), np.array(pred_scores).ravel())
        roc_auc["micro"] = auc(fpr["micro"], tpr["micro"])
        # Plot only Macro or Micro ROC curves
        if curve_type == "macro_micro":
            axes[i].plot(fpr["macro"], tpr["macro"], label=f"Macro (AUC={roc_auc['macro']:.2f})")
            axes[i].plot(fpr["micro"], tpr["micro"], label=f"Micro (AUC={roc_auc['micro']:.2f})")
        elif curve_type == "all":
            # Plot all ROC curves
            for j in range(10):
                axes[i].plot(fpr[j], tpr[j], label=f"Class {j} (AUC={roc_auc[j]:.2f})")
        # Chance-level diagonal for reference.
        axes[i].plot([0, 1], [0, 1], "k--")
        axes[i].set_xlabel("False Positive Rate")
        axes[i].set_ylabel("True Positive Rate")
        axes[i].set_title(f"ROC Curve {i+1}, {hparam_1}={exp_rs[hparam_1]}, {hparam_2}={exp_rs[hparam_2]}")
        axes[i].legend(loc='lower right')
    plt.tight_layout()
    plt.show()
3.0.2 Datasets¶
def split_train_valid(train_dataset, train_ratio):
    """Randomly split *train_dataset* into train and validation subsets.

    Args:
        train_dataset: any dataset implementing ``__len__``.
        train_ratio: fraction in [0, 1] of samples for the training split.

    Returns:
        ``(train_subset, valid_subset)``
    """
    total = len(train_dataset)
    n_train = int(train_ratio * total)
    n_valid = total - n_train
    parts = random_split(train_dataset, [n_train, n_valid])
    return parts[0], parts[1]
3.1 Experiment 1: Optimizer¶
In the standard process of gradient descent, each update is proportional to the negative gradient (first-order derivative) of the loss function with respect to the parameter. In this traditional process, the learning rate is fixed, and it may cause problems.
- Oscillations. If locally, the learning rate is too high, the model will jump around the local minimum.
- Slow convergence. If locally, the learning rate is too low, the model will spend a lot of epochs to converge to a local minimum.
To solve this problem, we enable the learning rate to be adaptive by introducing the "momentum", a velocity-like term which accumulates past gradients in the direction of consistent descent.
- The velocity term is the weighted sum of previous gradients.
- ...such that the update direction does not only rely on the current gradient, but also on previous ones.
The update of velocity is represented as: $$ v_t=\beta v_{t-1} + (1-\beta)\cdot\nabla J(\theta) $$ where $\beta$ is the momentum coefficient. In our experiments, $\beta$ will be fixed to $0.9$.
The update of parameters will be: $$ \theta_{t} = \theta_{t-1}-\eta\cdot v_{t} $$ In this experiment, we focus on the performance of different optimizers, each of which has its own optimized way to update the momentum. We will fix the other variables, including the transform, epoch number and learning rate, and only vary the optimizer. There are a few optimizers to be chosen:
- Adaptive Moment Estimation (Adam)
- Stochastic Gradient Descent (SGD)
- Root Mean Square Propagation (RMSprop)
- Adam with Weight Decay (AdamW)
- Adaptive Gradient Algorithm (Adagrad)
- SGD with Momentum and Nesterov Accelerated Gradient
# Universal Train Dataset without splitting
# (no transform, so get_meanstd can compute per-channel normalization stats
# from the raw training images).
exp1_universal_train_dataset = SVHNDataset(mat_file=os.path.join(path_dataset,"train_32x32.mat"))
exp1_mean, exp1_std = exp1_universal_train_dataset.get_meanstd()
print(f"Channel Means: {exp1_mean}")
print(f"Channel Stds: {exp1_std}")
Channel Means: [0.4376845359802246, 0.4437684714794159, 0.47280389070510864] Channel Stds: [0.19803018867969513, 0.2010156661272049, 0.19703581929206848]
Define changing & non-changing hyper parameters.
# Fixed hyper-parameters shared by every exp1 run; only the optimizer varies.
exp1_hyperparams = {
    "num_epochs": 30,
    "lr": 1e-5,
    "criterion": nn.CrossEntropyLoss(),
    "transform": A.Compose([
        A.Normalize(mean=exp1_mean, std=exp1_std),
        ToTensorV2()
    ])
}
# One fresh model per candidate optimizer so runs do not share weights.
exp1_models = [SmallVGG().to(device) for _ in range(0,6)]
# Candidate optimizers, each bound to its own model's parameters.
candidate_optimizers = [
    optim.Adam(exp1_models[0].parameters(), lr=exp1_hyperparams['lr']),
    optim.SGD(exp1_models[1].parameters(), lr=exp1_hyperparams['lr'], momentum=0.9),
    optim.RMSprop(exp1_models[2].parameters(), lr=exp1_hyperparams['lr']),
    optim.AdamW(exp1_models[3].parameters(), lr=exp1_hyperparams['lr'], weight_decay=0.01),
    optim.Adagrad(exp1_models[4].parameters(), lr=exp1_hyperparams['lr']),
    optim.SGD(exp1_models[5].parameters(), lr=exp1_hyperparams['lr'], momentum=0.9, nesterov=True)]
Train, Validation and Test datasets.
# Train & Test Dataset
# Training data is split 80/20 into train/validation subsets.
exp1_train_dataset = SVHNDataset(mat_file=os.path.join(path_dataset,"train_32x32.mat"), transform=exp1_hyperparams['transform'])
exp1_train_dataset, exp1_valid_dataset = split_train_valid(exp1_train_dataset, train_ratio=0.8)
# Test Dataset
exp1_test_dataset = SVHNDataset(mat_file=os.path.join(path_dataset,"test_32x32.mat"), transform=exp1_hyperparams['transform'])
print(f"Train Size:{exp1_train_dataset.__len__()}\nValidation Size:{exp1_valid_dataset.__len__()}\nTest Size:{exp1_test_dataset.__len__()}")
Train Size:58605 Validation Size:14652 Test Size:26032
Train, Validation and Test Data Loaders.
# Data Loaders
# Shuffle train/validation batches; keep the test order fixed for reproducible metrics.
exp1_train_loader = DataLoader(exp1_train_dataset, batch_size=128, shuffle=True)
exp1_valid_loader = DataLoader(exp1_valid_dataset, batch_size=128, shuffle=True)
exp1_test_loader = DataLoader(exp1_test_dataset, batch_size=128, shuffle=False)
Run Experiments
def run_exp1(optimizers, models, hyper_params, train_loader, valid_loader):
    """Train each (optimizer, model) pair and collect one record per run."""
    experiments = []
    for run_idx, (optimizer, net) in enumerate(zip(optimizers, models)):
        print(f"Experiment {run_idx+1}. Running experiment on optimizer: {optimizer.__class__.__name__}")
        criterion = hyper_params['criterion']
        num_epochs = hyper_params['num_epochs']
        train_losses, test_losses = train_and_evaluate(
            net, train_loader, valid_loader, criterion, optimizer, num_epochs)
        experiments.append({
            "optimizer": optimizer.__class__.__name__,
            "others": "same",
            "train_losses": train_losses,
            "test_losses": test_losses,
            "model_state_dict": net.state_dict()
        })
        # Drop local references and release cached GPU memory between runs.
        del net, criterion, optimizer
        torch.cuda.empty_cache()
    return experiments
exp1 = run_exp1(candidate_optimizers, exp1_models, exp1_hyperparams, exp1_train_loader, exp1_valid_loader)
time_str = str(time.time()).replace(".","")
torch.save(exp1, f"./models/exp1_{time_str}.pth")
Load Experiments
Load Experiment objects and plot results.
exp1_loaded = torch.load("./models/exp1_17302273106995156.pth")
exp1_results = get_experiment_results(exp1_loaded, test_hyperparam_names=["optimizer", "others"], extra_loader=exp1_test_loader)
plot_el(exp1_loaded, ["optimizer", "others"], n_rows=1, n_cols=6)
plot_cm(exp1_results, ["optimizer", "others"], n_rows=1, n_cols=6)
exp1_accuracies, exp1_f1s = plot_pr(exp1_results, ["optimizer", "others"], n_rows=1, n_cols=6)
print(f"Accuracies:")
for acc in exp1_accuracies:
print(f"{acc:.3f}", end=", ")
print("\n")
print(f"F1 Score Lists:")
for f1 in exp1_f1s:
for val in f1:
print(f"{val:.3f}", end=", ")
print(f"Avg F1={np.mean(f1):.3f}")
Accuracies: 0.755, 0.196, 0.687, 0.704, 0.196, 0.196, F1 Score Lists: 0.710, 0.861, 0.825, 0.688, 0.776, 0.694, 0.680, 0.800, 0.574, 0.639, Avg F1=0.725 0.000, 0.328, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, Avg F1=0.033 0.634, 0.816, 0.810, 0.573, 0.707, 0.628, 0.521, 0.741, 0.462, 0.573, Avg F1=0.646 0.624, 0.846, 0.756, 0.619, 0.761, 0.657, 0.589, 0.749, 0.536, 0.562, Avg F1=0.670 0.000, 0.328, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, Avg F1=0.033 0.000, 0.328, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, Avg F1=0.033
plot_rocauc(exp1_results, ["optimizer", "others"], curve_type="all", n_rows=1, n_cols=6)
plot_rocauc(exp1_results, ["optimizer", "others"], curve_type="macro_micro", n_rows=1, n_cols=6)
3.2 Experiment 2: Epoch Number and Learning Rate¶
This experiment seeks to find the effect of different combinations of epoch numbers and learning rates on the training & testing performance of the neural network.
3.2.1 Experiment 2-1: Rough Search¶
In this sub-experiment, we perform a rough search on the epochs and learning rate. We promoted four possible values for both parameters: $$ \text{candidate epochs}=\{10, 15, 20, 25\} $$ $$ \text{candidate lr}=\{1.0\times 10^{-3},1.0\times 10^{-4},1.0\times 10^{-5},1.0\times 10^{-6}\} $$
# Universal Train Dataset without splitting
exp2_universal_train_dataset = SVHNDataset(mat_file=os.path.join(path_dataset,"train_32x32.mat"))
exp2_mean, exp2_std = exp2_universal_train_dataset.get_meanstd()
print(f"Channel Means: {exp2_mean}")
print(f"Channel Stds: {exp2_std}")
Channel Means: [0.4376845359802246, 0.4437684714794159, 0.47280389070510864] Channel Stds: [0.19803018867969513, 0.2010156661272049, 0.19703581929206848]
# Fixed hyper-parameters for exp2; the (num_epochs, lr) grid varies per run.
exp2_hyperparams = {
    "criterion": nn.CrossEntropyLoss(),
    "transform": A.Compose([
        A.Normalize(mean=exp2_mean, std=exp2_std),
        ToTensorV2()
    ]),
    # Stored as the class so a fresh optimizer is built per run.
    "optimizer":optim.Adam,
}
candidate_epochs = [10, 15, 20, 25]
candidate_lr = [1e-3, 1e-4, 1e-5, 1e-6]
torch.cuda.empty_cache()
# Train & Validation Datasets
exp2_train_dataset = SVHNDataset(mat_file=os.path.join(path_dataset,"train_32x32.mat"), transform=exp2_hyperparams['transform'])
exp2_train_dataset, exp2_valid_dataset = split_train_valid(exp2_train_dataset, train_ratio=0.8)
# Test Dataset
exp2_test_dataset = SVHNDataset(mat_file=os.path.join(path_dataset,"test_32x32.mat"), transform=exp2_hyperparams['transform'])
print(f"Train Size:{exp2_train_dataset.__len__()}\nValidation Size:{exp2_valid_dataset.__len__()}\nTest Size:{exp2_test_dataset.__len__()}")
Train Size:58605 Validation Size:14652 Test Size:26032
exp2_train_loader = DataLoader(exp2_train_dataset, batch_size=128, shuffle=True)
exp2_valid_loader = DataLoader(exp2_valid_dataset, batch_size=128, shuffle=True)
# BUGFIX: the test loader previously wrapped exp2_valid_dataset, so every
# "test" metric in experiment 2 was actually computed on the validation split.
exp2_test_loader = DataLoader(exp2_test_dataset, batch_size=128, shuffle=False)
def run_exp2(epochs, lr_list, hyper_params, train_loader, test_loader):
    """Train one fresh SmallVGG per (num_epochs, lr) grid combination."""
    experiments = []
    grid = itertools.product(epochs, lr_list)
    for i, (num_epochs, lr) in enumerate(grid):
        print(f"Running Exp {i+1}: num_epoch={num_epochs}, lr={lr}")
        net = SmallVGG().to(device)
        criterion = hyper_params['criterion']
        optimizer = hyper_params['optimizer'](net.parameters(), lr=lr)
        train_losses, test_losses = train_and_evaluate(
            net, train_loader, test_loader, criterion, optimizer, num_epochs)
        experiments.append({
            "num_epochs": num_epochs,
            "lr": lr,
            "train_losses": train_losses,
            "test_losses": test_losses,
            "model_state_dict": net.state_dict()
        })
        del net, criterion, optimizer
    return experiments
exp2 = run_exp2(candidate_epochs, candidate_lr, exp2_hyperparams, exp2_train_loader, exp2_valid_loader)
time_str = str(time.time()).replace(".","")
torch.save(exp2, f"./models/exp2_1_{time_str}.pth")
exp2_loaded = torch.load("./models/exp2_1_1730229238268616.pth")
exp2_results = get_experiment_results(exp2_loaded, test_hyperparam_names=["num_epochs", "lr"], extra_loader=exp2_test_loader)
3.2.1-1 Epoch-Loss Curve¶
We found that the key to the training performance of a model is the learning rate. Epoch number only controls the progress of training.
From the perspective of learning rate (each column), only the learning rate of $1.0\times 10^{-3}$ shows a sign of convergence under each candidate epochs. With this learning rate, the model even overfitted under experiments with an epoch number over $15$. The best model we conclude from this rough selection is the one with the combination of $\text{num\_epoch}=10\land\text{lr}=1.0\times10^{-3}$. The minimum validation loss is $36.648$ at step $7$, which is the lowest of all $16$ samples. However, this doesn't mean that it is optimal since it may jump over a local minimum.
Moreover, as we inspect the performance on smaller learning rates, we found that they tend to converge only at much later epoch steps. For the learning rate of $1.0\times 10^{-6}$, the learning rate is so low that the model cannot fit within a practical number of epochs.
plot_el(exp2_loaded, ["num_epochs", "lr"], n_rows=4, n_cols=4)
3.2.1-2 Confusion Matrix¶
In this rough search, the confusion matrix varies on different learning rates, and tends to be identical on different epochs.
Under the same epoch number, as the learning rate gets smaller, the confusion matrix gets "blurrier", meaning that the prediction is less accurate overall. Learning rates below $1.0\times 10^{-4}$ are too low for the model to converge in a reasonable number of epochs. For the lowest learning rate of $1.0\times 10^{-6}$, the model is not fitted at all. It classifies every number as 1, the most frequent class in the dataset.
plot_cm(exp2_results, ["num_epochs", "lr"], n_rows=4, n_cols=4)
3.2.1-3 Precision-Recall Curve¶
From a numerical perspective over the testing performance, the combination of $\text{num\_epoch}=10\land\text{lr}=1.0\times10^{-3}$ gives the highest accuracy of $0.920$, highest average $F_1$ score of $0.916$ and the lowest $F_1$ variance per-class of $0.019$.
exp2_accuracies, exp2_f1s = plot_pr(exp2_results, ["num_epochs", "lr"], n_rows=4, n_cols=4)
print(f"Accuracies:")
for acc in exp2_accuracies:
print(f"{acc:.3f}", end=", ")
print("\n")
print(f"F1 Score Lists:")
for f1 in exp2_f1s:
for val in f1:
print(f"{val:.3f}", end=", ")
print(f"Avg F1={np.mean(f1):.3f}, Std={np.std(f1):.3f}")
Accuracies: 0.920, 0.846, 0.498, 0.187, 0.910, 0.859, 0.501, 0.187, 0.909, 0.890, 0.583, 0.187, 0.907, 0.884, 0.691, 0.187, F1 Score Lists: 0.924, 0.937, 0.941, 0.904, 0.934, 0.910, 0.886, 0.932, 0.892, 0.898, Avg F1=0.916, Std=0.019 0.849, 0.895, 0.909, 0.795, 0.862, 0.808, 0.796, 0.868, 0.750, 0.793, Avg F1=0.833, Std=0.049 0.399, 0.683, 0.581, 0.412, 0.533, 0.438, 0.369, 0.503, 0.118, 0.250, Avg F1=0.429, Std=0.154 0.000, 0.315, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, Avg F1=0.031, Std=0.094 0.911, 0.931, 0.938, 0.895, 0.917, 0.897, 0.886, 0.915, 0.864, 0.885, Avg F1=0.904, Std=0.022 0.876, 0.889, 0.911, 0.819, 0.873, 0.836, 0.823, 0.876, 0.770, 0.825, Avg F1=0.850, Std=0.040 0.333, 0.713, 0.555, 0.265, 0.587, 0.506, 0.336, 0.557, 0.057, 0.059, Avg F1=0.397, Std=0.213 0.000, 0.315, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, Avg F1=0.031, Std=0.094 0.919, 0.929, 0.935, 0.895, 0.919, 0.899, 0.874, 0.922, 0.842, 0.903, Avg F1=0.904, Std=0.027 0.903, 0.928, 0.931, 0.864, 0.904, 0.873, 0.849, 0.906, 0.793, 0.863, Avg F1=0.881, Std=0.040 0.470, 0.741, 0.680, 0.442, 0.695, 0.550, 0.473, 0.631, 0.091, 0.442, Avg F1=0.521, Std=0.179 0.000, 0.315, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, Avg F1=0.031, Std=0.094 0.918, 0.927, 0.934, 0.895, 0.921, 0.887, 0.869, 0.911, 0.868, 0.889, Avg F1=0.902, Std=0.023 0.895, 0.924, 0.925, 0.856, 0.893, 0.871, 0.842, 0.904, 0.796, 0.844, Avg F1=0.875, Std=0.039 0.658, 0.812, 0.768, 0.601, 0.742, 0.633, 0.569, 0.763, 0.452, 0.605, Avg F1=0.660, Std=0.105 0.000, 0.315, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, Avg F1=0.031, Std=0.094
plot_rocauc(exp2_results, ["num_epochs", "lr"], curve_type="all", n_rows=4, n_cols=4)
plot_rocauc(exp2_results, ["num_epochs", "lr"], curve_type="macro_micro", n_rows=4, n_cols=4)
3.2.2 Experiment 2-2: Detailed¶
Previous sub-experiment tells that the best combination from all the listed ones is $\text{num\_epoch}=10 \land \text{lr}=1.0\times 10^{-3}$.
This is a rough solution, as it may jump over local minima. We want to find a better learning rate around $1.0\times 10^{-3}$, with an even finer distinction between candidate values, so that it may reveal a missed local minimum without using too many epochs.
We conducted an excessive experiment, purposely seeking an overfitting point over the listed candidate learning rates. We do this by setting the epoch number to $50$.
# Fine learning-rate search: epoch count pinned to 50 to deliberately reach
# the overfitting regime for every candidate.
exp2_2_hyperparams = {
    "num_epoch": 50,
    "criterion": nn.CrossEntropyLoss(),
    "transform": A.Compose([
        A.Normalize(mean=exp2_mean, std=exp2_std),
        ToTensorV2()
    ]),
    "optimizer":optim.Adam,
}
# Detailed candidate learning rates bracketing 1e-3 (the best coarse value).
# NOTE(review): 15e-4 appears twice in this list -- likely one entry was meant
# to be 16e-4; confirm before rerunning.
exp2_2_candidate_lr = [17e-4, 15e-4, 15e-4, 14e-4, 13e-4, 12e-4, 8e-4, 7e-4, 6e-4, 5e-4, 4e-4, 3e-4]
def run_exp2_2(lr_list, hyper_params, train_loader, test_loader):
    """Train one fresh SmallVGG per candidate learning rate (fixed epoch count)."""
    experiments = []
    for i, lr in enumerate(lr_list):
        print(f"Running Exp {i+1}: lr={lr}")
        net = SmallVGG().to(device)
        num_epochs = hyper_params['num_epoch']
        criterion = hyper_params['criterion']
        optimizer = hyper_params['optimizer'](net.parameters(), lr=lr)
        train_losses, test_losses = train_and_evaluate(
            net, train_loader, test_loader, criterion, optimizer, num_epochs)
        experiments.append({
            "num_epochs": num_epochs,
            "lr": lr,
            "train_losses": train_losses,
            "test_losses": test_losses,
            "model_state_dict": net.state_dict()
        })
        del net, criterion, optimizer
    return experiments
exp2_2 = run_exp2_2(exp2_2_candidate_lr, exp2_2_hyperparams, exp2_train_loader, exp2_valid_loader)
time_str = str(time.time()).replace(".", "")
torch.save(exp2_2, f"./models/exp2_2_{time_str}.pth")
exp2_2_loaded = torch.load("./models/exp2_2_17302741969577262.pth")
exp2_2_results = get_experiment_results(exp2_2_loaded, test_hyperparam_names=["num_epochs", "lr"], extra_loader=exp2_test_loader)
3.2.2-1 Epoch-Loss Curve¶
By inspecting the epoch-loss curve, we found that all the experiments are overfitted. This means that $50$ epochs are enough for conducting the detailed search.
From all the detailed searches, the learning rate of $14\times10^{-4}$, that is 1.4e-3, yields the lowest validation loss of $36.259$ at step $5$, which is the overfitting point. We discovered a new local minimum that's been jumped over by learning rate of 1e-3, which previously yield a validation loss of $36.648$.
plot_el(exp2_2_loaded, ["num_epochs", "lr"], n_rows=2, n_cols=6)
3.2.2-2 Confusion Matrix¶
At a glance, from the perspective of confusion matrix, the testing performance on unknown data is roughly identical.
plot_cm(exp2_2_results, ["num_epochs", "lr"], n_rows=2, n_cols=6)
3.2.2-3 Precision-Recall Curve¶
By inspecting the evaluation metrics, we found our judgement correct. From all the over-fitted model, the model with learning rate of $7.0\times 10^{-4}$ yields the highest accuracy of $0.909$ and the highest average per-class $F_1$ score of $0.903$. Besides, the per-class $F_1$ score is also less variant under the learning rate of $7.0\times 10^{-4}$, with the standard deviation of $0.023$.
exp2_2_accuracies, exp2_2_f1s = plot_pr(exp2_2_results, ["num_epochs", "lr"], n_rows=2, n_cols=6)
print(f"Accuracies:")
for acc in exp2_2_accuracies:
print(f"{acc:.3f}", end=", ")
print("\n")
print(f"F1 Score Lists:")
for f1 in exp2_2_f1s:
for val in f1:
print(f"{val:.3f}", end=", ")
print(f"Avg F1={np.mean(f1):.3f}, Std={np.std(f1):.3f}")
Accuracies: 0.907, 0.900, 0.905, 0.905, 0.914, 0.911, 0.906, 0.911, 0.907, 0.912, 0.909, 0.908, F1 Score Lists: 0.918, 0.930, 0.931, 0.892, 0.924, 0.901, 0.870, 0.913, 0.859, 0.878, Avg F1=0.902, Std=0.024 0.912, 0.925, 0.926, 0.877, 0.912, 0.898, 0.854, 0.914, 0.852, 0.879, Avg F1=0.895, Std=0.026 0.915, 0.923, 0.929, 0.890, 0.913, 0.894, 0.877, 0.911, 0.868, 0.887, Avg F1=0.900, Std=0.019 0.917, 0.925, 0.932, 0.883, 0.910, 0.898, 0.876, 0.918, 0.849, 0.893, Avg F1=0.900, Std=0.024 0.924, 0.932, 0.935, 0.904, 0.921, 0.905, 0.891, 0.918, 0.862, 0.893, Avg F1=0.909, Std=0.021 0.921, 0.930, 0.935, 0.904, 0.916, 0.909, 0.878, 0.912, 0.861, 0.895, Avg F1=0.906, Std=0.022 0.913, 0.927, 0.938, 0.885, 0.923, 0.893, 0.871, 0.907, 0.864, 0.877, Avg F1=0.900, Std=0.024 0.915, 0.928, 0.942, 0.897, 0.922, 0.899, 0.885, 0.914, 0.860, 0.890, Avg F1=0.905, Std=0.023 0.909, 0.928, 0.932, 0.884, 0.921, 0.906, 0.880, 0.917, 0.845, 0.892, Avg F1=0.901, Std=0.025 0.916, 0.930, 0.935, 0.896, 0.925, 0.896, 0.890, 0.909, 0.876, 0.902, Avg F1=0.907, Std=0.018 0.918, 0.928, 0.935, 0.885, 0.918, 0.902, 0.887, 0.918, 0.861, 0.894, Avg F1=0.905, Std=0.022 0.914, 0.931, 0.938, 0.887, 0.919, 0.897, 0.880, 0.916, 0.848, 0.889, Avg F1=0.902, Std=0.026
3.2.2-4 ROC-AUC Curve¶
The ROC-AUC Curve under all the detailed candidate learning rates are roughly identical.
plot_rocauc(exp2_2_results, ["num_epochs", "lr"], curve_type="all", n_rows=2, n_cols=6)
plot_rocauc(exp2_2_results, ["num_epochs", "lr"], curve_type="macro_micro", n_rows=2, n_cols=6)
3.3 Experiment 3: Transform¶
exp3_1_hyperparams = {
"num_epochs": 15,
"lr": 1e-3,
"criterion": nn.CrossEntropyLoss(),
"optimizer": optim.Adam
}
# Group 1
candidate_angles = [15, 30, 45, 60]
candidate_crops = [0.08, 0.24, 0.40, 0.60] # Left Boundary
exp3_train_dataset = SVHNDataset(mat_file=os.path.join(path_dataset, "train_32x32.mat"))
exp3_test_dataset = SVHNDataset(mat_file=os.path.join(path_dataset, "test_32x32.mat"))
exp3_extra_dataset = SVHNDataset(mat_file=os.path.join(path_dataset, "extra_32x32.mat"))
print(f"Train Size:{exp3_train_dataset.__len__()}\nTest Size:{exp3_test_dataset.__len__()}\nExtra Size:{exp3_extra_dataset.__len__()}")
Train Size:73257 Test Size:26032 Extra Size:531131
3.3.1 Experiment 3-1: Angles & Crops¶
def run_exp3_1(angles, crops, hyper_params, train_dataset, test_dataset):
    """Grid-search over rotation angles and crop-scale lower bounds.

    For every (angle, crop) combination, trains a fresh SmallVGG with the
    fixed settings in `hyper_params` and records the per-epoch loss curves
    together with the final model weights.

    Args:
        angles: iterable of max rotation angles (degrees) passed to A.Rotate.
        crops: iterable of lower bounds for the RandomResizedCrop scale range.
        hyper_params: dict with keys 'num_epochs', 'lr', 'criterion',
            'optimizer' (an optimizer class, e.g. optim.Adam).
        train_dataset: SVHNDataset; its .transform attribute is overwritten
            in place for each run.
        test_dataset: SVHNDataset; likewise mutated in place.

    Returns:
        List of dicts, one per combination, with keys 'angle', 'crop',
        'train_losses', 'test_losses', 'model_state_dict'.
    """
    # BUGFIX: evaluation must be deterministic. The previous version applied
    # the random train-time augmentation (RandomResizedCrop + Rotate) to the
    # test set as well, which injects sampling noise into the reported test
    # loss. The test pipeline now only normalizes and converts to a tensor.
    eval_transform = A.Compose([
        A.Normalize(mean=norm_mean, std=norm_std),
        ToTensorV2()
    ])
    combinations = list(itertools.product(angles, crops))
    experiments = []
    for i, (angle, crop) in enumerate(combinations):
        print(f"Running Exp {i+1}: angles={angle}, crop={crop}")
        this_model = SmallVGG().to(device)
        num_epochs = hyper_params['num_epochs']
        lr = hyper_params['lr']
        criterion = hyper_params['criterion']
        optimizer = hyper_params['optimizer'](this_model.parameters(), lr=lr)
        # Random augmentation is used for training only.
        this_transform = A.Compose([
            A.RandomResizedCrop(32, 32, scale=(crop, 1.0)),
            A.Rotate(limit=angle),
            A.Normalize(mean=norm_mean, std=norm_std),
            ToTensorV2()
        ])
        # Attach transforms and build fresh loaders for this run.
        print(f"Exp {i+1}: Generating dataset from transform")
        train_dataset.transform = this_transform
        test_dataset.transform = eval_transform
        train_loader = DataLoader(train_dataset, batch_size=128, shuffle=True)
        test_loader = DataLoader(test_dataset, batch_size=128, shuffle=False)
        # Train the model and collect loss curves.
        train_losses, test_losses = train_and_evaluate(this_model,
                                                       train_loader,
                                                       test_loader,
                                                       criterion,
                                                       optimizer,
                                                       num_epochs)
        experiments.append({
            "angle": angle,
            "crop": crop,
            "train_losses": train_losses,
            "test_losses": test_losses,
            "model_state_dict": this_model.state_dict()
        })
        # Release GPU memory before the next combination.
        del this_model, criterion, optimizer
        del train_loader, test_loader
        torch.cuda.empty_cache()
    return experiments
# Run the full angle x crop sweep, then checkpoint every result dict under a
# timestamp-derived filename so earlier runs are never overwritten.
exp3_1 = run_exp3_1(candidate_angles, candidate_crops, exp3_1_hyperparams, exp3_train_dataset, exp3_test_dataset)
time_str = "".join(str(time.time()).split("."))
torch.save(exp3_1, f"./models/exp3_1_{time_str}.pth")
Running Exp 1: angles=15, crop=0.08 Exp 1: Generating dataset from transform
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.61it/s]
Epoch[1/15], Train Loss:261.5060, Test Loss:201.0278
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.42it/s]
Epoch[2/15], Train Loss:170.1129, Test Loss:147.9401
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.24it/s]
Epoch[3/15], Train Loss:139.7338, Test Loss:131.5334
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 33.80it/s]
Epoch[4/15], Train Loss:127.6843, Test Loss:119.5422
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.41it/s]
Epoch[5/15], Train Loss:121.4710, Test Loss:116.1567
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 33.77it/s]
Epoch[6/15], Train Loss:116.4647, Test Loss:115.7246
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.22it/s]
Epoch[7/15], Train Loss:113.9502, Test Loss:106.4324
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 33.89it/s]
Epoch[8/15], Train Loss:111.2306, Test Loss:106.8681
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.20it/s]
Epoch[9/15], Train Loss:109.8530, Test Loss:103.0799
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.07it/s]
Epoch[10/15], Train Loss:107.9646, Test Loss:103.5623
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.08it/s]
Epoch[11/15], Train Loss:107.4535, Test Loss:101.8924
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.71it/s]
Epoch[12/15], Train Loss:105.3729, Test Loss:99.0370
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.48it/s]
Epoch[13/15], Train Loss:105.0582, Test Loss:99.4525
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.33it/s]
Epoch[14/15], Train Loss:103.5093, Test Loss:98.3857
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 32.99it/s]
Epoch[15/15], Train Loss:103.3996, Test Loss:99.3310 Running Exp 2: angles=15, crop=0.24 Exp 2: Generating dataset from transform
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 32.90it/s]
Epoch[1/15], Train Loss:236.1950, Test Loss:168.8818
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 33.87it/s]
Epoch[2/15], Train Loss:142.8428, Test Loss:116.8797
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 33.90it/s]
Epoch[3/15], Train Loss:113.1784, Test Loss:103.5311
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.50it/s]
Epoch[4/15], Train Loss:100.4082, Test Loss:94.5276
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.70it/s]
Epoch[5/15], Train Loss:93.0474, Test Loss:84.7019
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.28it/s]
Epoch[6/15], Train Loss:87.4109, Test Loss:81.8887
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:15<00:00, 35.82it/s]
Epoch[7/15], Train Loss:82.8098, Test Loss:77.5598
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.60it/s]
Epoch[8/15], Train Loss:81.2828, Test Loss:75.8507
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:15<00:00, 35.88it/s]
Epoch[9/15], Train Loss:78.0170, Test Loss:76.2646
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:15<00:00, 36.90it/s]
Epoch[10/15], Train Loss:77.0653, Test Loss:72.9959
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 35.14it/s]
Epoch[11/15], Train Loss:75.7438, Test Loss:70.5730
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 32.89it/s]
Epoch[12/15], Train Loss:74.0718, Test Loss:70.1219
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.39it/s]
Epoch[13/15], Train Loss:73.2526, Test Loss:69.0054
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:15<00:00, 36.72it/s]
Epoch[14/15], Train Loss:72.8759, Test Loss:67.8361
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.86it/s]
Epoch[15/15], Train Loss:71.0463, Test Loss:68.0882 Running Exp 3: angles=15, crop=0.4 Exp 3: Generating dataset from transform
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 33.98it/s]
Epoch[1/15], Train Loss:238.6202, Test Loss:144.4784
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 33.87it/s]
Epoch[2/15], Train Loss:117.8920, Test Loss:94.2335
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.15it/s]
Epoch[3/15], Train Loss:88.3283, Test Loss:81.3414
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.30it/s]
Epoch[4/15], Train Loss:75.6279, Test Loss:68.6358
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.50it/s]
Epoch[5/15], Train Loss:68.7018, Test Loss:65.0433
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.56it/s]
Epoch[6/15], Train Loss:63.8435, Test Loss:58.4936
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.34it/s]
Epoch[7/15], Train Loss:60.8010, Test Loss:57.1383
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.69it/s]
Epoch[8/15], Train Loss:59.0633, Test Loss:55.0703
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.34it/s]
Epoch[9/15], Train Loss:56.8239, Test Loss:54.4075
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 33.90it/s]
Epoch[10/15], Train Loss:55.2775, Test Loss:51.2466
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.61it/s]
Epoch[11/15], Train Loss:54.1812, Test Loss:55.9096
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.28it/s]
Epoch[12/15], Train Loss:53.0266, Test Loss:50.8370
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.34it/s]
Epoch[13/15], Train Loss:52.1164, Test Loss:50.1073
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.07it/s]
Epoch[14/15], Train Loss:51.4253, Test Loss:48.3008
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 33.86it/s]
Epoch[15/15], Train Loss:50.8966, Test Loss:52.2237 Running Exp 4: angles=15, crop=0.6 Exp 4: Generating dataset from transform
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.18it/s]
Epoch[1/15], Train Loss:199.3159, Test Loss:99.8052
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.58it/s]
Epoch[2/15], Train Loss:82.8643, Test Loss:71.9677
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 33.91it/s]
Epoch[3/15], Train Loss:63.5704, Test Loss:64.6327
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 33.75it/s]
Epoch[4/15], Train Loss:56.2597, Test Loss:54.3290
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.56it/s]
Epoch[5/15], Train Loss:51.5064, Test Loss:48.5672
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.55it/s]
Epoch[6/15], Train Loss:48.6316, Test Loss:46.4294
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.13it/s]
Epoch[7/15], Train Loss:45.7590, Test Loss:44.7168
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.70it/s]
Epoch[8/15], Train Loss:44.2462, Test Loss:43.7604
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.01it/s]
Epoch[9/15], Train Loss:41.9103, Test Loss:41.5930
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 33.99it/s]
Epoch[10/15], Train Loss:41.5019, Test Loss:40.8065
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.14it/s]
Epoch[11/15], Train Loss:39.6637, Test Loss:37.4625
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.61it/s]
Epoch[12/15], Train Loss:38.9371, Test Loss:37.9654
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.00it/s]
Epoch[13/15], Train Loss:38.0382, Test Loss:36.8931
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 33.87it/s]
Epoch[14/15], Train Loss:37.4493, Test Loss:36.4331
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.44it/s]
Epoch[15/15], Train Loss:36.4876, Test Loss:37.5524 Running Exp 5: angles=30, crop=0.08 Exp 5: Generating dataset from transform
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.10it/s]
Epoch[1/15], Train Loss:275.2046, Test Loss:224.0560
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.60it/s]
Epoch[2/15], Train Loss:190.5530, Test Loss:164.5913
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 33.97it/s]
Epoch[3/15], Train Loss:156.3354, Test Loss:144.1799
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 33.97it/s]
Epoch[4/15], Train Loss:142.3773, Test Loss:137.0528
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.64it/s]
Epoch[5/15], Train Loss:136.3037, Test Loss:127.4181
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.08it/s]
Epoch[6/15], Train Loss:130.7595, Test Loss:125.6487
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 35.13it/s]
Epoch[7/15], Train Loss:126.9686, Test Loss:119.4266
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.60it/s]
Epoch[8/15], Train Loss:124.3396, Test Loss:118.5403
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.51it/s]
Epoch[9/15], Train Loss:122.4813, Test Loss:114.9661
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.57it/s]
Epoch[10/15], Train Loss:120.5036, Test Loss:115.4562
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.27it/s]
Epoch[11/15], Train Loss:118.4684, Test Loss:117.3322
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.90it/s]
Epoch[12/15], Train Loss:118.3023, Test Loss:116.4632
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.66it/s]
Epoch[13/15], Train Loss:117.0507, Test Loss:112.2545
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.31it/s]
Epoch[14/15], Train Loss:115.3733, Test Loss:110.5428
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.33it/s]
Epoch[15/15], Train Loss:114.7811, Test Loss:109.6120 Running Exp 6: angles=30, crop=0.24 Exp 6: Generating dataset from transform
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.16it/s]
Epoch[1/15], Train Loss:266.9389, Test Loss:194.2414
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 33.87it/s]
Epoch[2/15], Train Loss:155.5109, Test Loss:126.4608
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.34it/s]
Epoch[3/15], Train Loss:121.5722, Test Loss:111.7372
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 33.93it/s]
Epoch[4/15], Train Loss:108.5193, Test Loss:100.1074
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.03it/s]
Epoch[5/15], Train Loss:101.3210, Test Loss:96.0768
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.06it/s]
Epoch[6/15], Train Loss:95.7439, Test Loss:89.6330
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 33.87it/s]
Epoch[7/15], Train Loss:92.0405, Test Loss:85.4961
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 33.91it/s]
Epoch[8/15], Train Loss:89.8950, Test Loss:82.4442
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.09it/s]
Epoch[9/15], Train Loss:87.1899, Test Loss:83.7929
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 33.82it/s]
Epoch[10/15], Train Loss:86.0043, Test Loss:81.3218
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.94it/s]
Epoch[11/15], Train Loss:83.5289, Test Loss:82.0052
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.58it/s]
Epoch[12/15], Train Loss:82.3067, Test Loss:75.5613
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.11it/s]
Epoch[13/15], Train Loss:80.5349, Test Loss:79.5662
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:15<00:00, 36.37it/s]
Epoch[14/15], Train Loss:80.6135, Test Loss:76.5861
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:15<00:00, 36.90it/s]
Epoch[15/15], Train Loss:79.4848, Test Loss:77.3559 Running Exp 7: angles=30, crop=0.4 Exp 7: Generating dataset from transform
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.15it/s]
Epoch[1/15], Train Loss:217.5548, Test Loss:133.3803
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 33.87it/s]
Epoch[2/15], Train Loss:114.7234, Test Loss:96.5217
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 32.00it/s]
Epoch[3/15], Train Loss:92.1138, Test Loss:81.4443
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.66it/s]
Epoch[4/15], Train Loss:82.0800, Test Loss:78.8911
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.71it/s]
Epoch[5/15], Train Loss:75.8187, Test Loss:69.9560
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 32.96it/s]
Epoch[6/15], Train Loss:71.4162, Test Loss:66.0856
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 32.67it/s]
Epoch[7/15], Train Loss:68.7067, Test Loss:64.5188
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 33.77it/s]
Epoch[8/15], Train Loss:66.6067, Test Loss:64.1271
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.38it/s]
Epoch[9/15], Train Loss:64.3267, Test Loss:61.0001
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.67it/s]
Epoch[10/15], Train Loss:62.7726, Test Loss:60.0254
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.00it/s]
Epoch[11/15], Train Loss:61.7784, Test Loss:58.0675
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.76it/s]
Epoch[12/15], Train Loss:60.6613, Test Loss:60.4303
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:15<00:00, 36.51it/s]
Epoch[13/15], Train Loss:59.9321, Test Loss:56.9359
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:15<00:00, 36.22it/s]
Epoch[14/15], Train Loss:58.6858, Test Loss:53.6621
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:15<00:00, 36.65it/s]
Epoch[15/15], Train Loss:57.5995, Test Loss:51.5253 Running Exp 8: angles=30, crop=0.6 Exp 8: Generating dataset from transform
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:15<00:00, 36.89it/s]
Epoch[1/15], Train Loss:210.1614, Test Loss:118.7430
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:15<00:00, 36.33it/s]
Epoch[2/15], Train Loss:98.0520, Test Loss:84.7792
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:15<00:00, 37.03it/s]
Epoch[3/15], Train Loss:77.8603, Test Loss:70.9961
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:15<00:00, 36.74it/s]
Epoch[4/15], Train Loss:68.5855, Test Loss:64.8881
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:15<00:00, 36.19it/s]
Epoch[5/15], Train Loss:62.2285, Test Loss:56.2229
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:15<00:00, 37.14it/s]
Epoch[6/15], Train Loss:57.9389, Test Loss:59.3211
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:15<00:00, 37.31it/s]
Epoch[7/15], Train Loss:54.3443, Test Loss:52.8963
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:15<00:00, 37.76it/s]
Epoch[8/15], Train Loss:52.5038, Test Loss:49.3112
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:15<00:00, 35.86it/s]
Epoch[9/15], Train Loss:51.2319, Test Loss:49.9893
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.28it/s]
Epoch[10/15], Train Loss:49.4784, Test Loss:46.1553
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.60it/s]
Epoch[11/15], Train Loss:48.5230, Test Loss:46.1944
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 33.98it/s]
Epoch[12/15], Train Loss:46.6760, Test Loss:46.6419
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 33.75it/s]
Epoch[13/15], Train Loss:46.2224, Test Loss:44.2649
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.13it/s]
Epoch[14/15], Train Loss:45.2754, Test Loss:43.8261
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.22it/s]
Epoch[15/15], Train Loss:44.4078, Test Loss:43.0297 Running Exp 9: angles=45, crop=0.08 Exp 9: Generating dataset from transform
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.14it/s]
Epoch[1/15], Train Loss:280.5044, Test Loss:240.1144
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.47it/s]
Epoch[2/15], Train Loss:203.2110, Test Loss:175.1215
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.12it/s]
Epoch[3/15], Train Loss:164.9194, Test Loss:149.1841
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 33.95it/s]
Epoch[4/15], Train Loss:149.5268, Test Loss:143.2028
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.10it/s]
Epoch[5/15], Train Loss:141.0264, Test Loss:131.7502
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.54it/s]
Epoch[6/15], Train Loss:135.3766, Test Loss:132.8216
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.43it/s]
Epoch[7/15], Train Loss:130.7131, Test Loss:128.3838
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:15<00:00, 36.24it/s]
Epoch[8/15], Train Loss:128.5763, Test Loss:122.2258
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 35.06it/s]
Epoch[9/15], Train Loss:127.0223, Test Loss:120.7556
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.56it/s]
Epoch[10/15], Train Loss:125.3888, Test Loss:118.2154
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 33.93it/s]
Epoch[11/15], Train Loss:122.2569, Test Loss:119.3883
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.26it/s]
Epoch[12/15], Train Loss:121.3993, Test Loss:115.6329
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 33.90it/s]
Epoch[13/15], Train Loss:119.6684, Test Loss:114.8020
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.66it/s]
Epoch[14/15], Train Loss:119.0040, Test Loss:116.8726
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.14it/s]
Epoch[15/15], Train Loss:118.0036, Test Loss:117.9713 Running Exp 10: angles=45, crop=0.24 Exp 10: Generating dataset from transform
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 33.75it/s]
Epoch[1/15], Train Loss:253.6932, Test Loss:188.7089
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.70it/s]
Epoch[2/15], Train Loss:158.3823, Test Loss:137.7284
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.11it/s]
Epoch[3/15], Train Loss:130.9987, Test Loss:121.4077
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.48it/s]
Epoch[4/15], Train Loss:118.0153, Test Loss:111.6952
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.06it/s]
Epoch[5/15], Train Loss:110.4603, Test Loss:106.9579
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 32.37it/s]
Epoch[6/15], Train Loss:105.3283, Test Loss:98.8407
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.16it/s]
Epoch[7/15], Train Loss:101.9695, Test Loss:94.4462
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 32.37it/s]
Epoch[8/15], Train Loss:97.5218, Test Loss:95.7446
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 32.67it/s]
Epoch[9/15], Train Loss:96.4391, Test Loss:91.7456
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 32.58it/s]
Epoch[10/15], Train Loss:93.6975, Test Loss:88.2562
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 32.52it/s]
Epoch[11/15], Train Loss:92.0711, Test Loss:86.8893
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 32.61it/s]
Epoch[12/15], Train Loss:90.4686, Test Loss:86.1701
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 32.60it/s]
Epoch[13/15], Train Loss:89.6186, Test Loss:85.1295
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:15<00:00, 36.79it/s]
Epoch[14/15], Train Loss:88.3990, Test Loss:83.4672
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:15<00:00, 36.07it/s]
Epoch[15/15], Train Loss:87.1852, Test Loss:82.7548 Running Exp 11: angles=45, crop=0.4 Exp 11: Generating dataset from transform
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.22it/s]
Epoch[1/15], Train Loss:257.4885, Test Loss:180.9128
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.40it/s]
Epoch[2/15], Train Loss:143.8207, Test Loss:118.7275
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 33.90it/s]
Epoch[3/15], Train Loss:110.2081, Test Loss:100.6459
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.35it/s]
Epoch[4/15], Train Loss:96.1737, Test Loss:88.6081
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.21it/s]
Epoch[5/15], Train Loss:88.6977, Test Loss:84.7917
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.29it/s]
Epoch[6/15], Train Loss:84.0752, Test Loss:78.5857
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.50it/s]
Epoch[7/15], Train Loss:79.1967, Test Loss:77.4861
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.56it/s]
Epoch[8/15], Train Loss:76.3396, Test Loss:72.2767
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.58it/s]
Epoch[9/15], Train Loss:74.7498, Test Loss:72.5802
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.67it/s]
Epoch[10/15], Train Loss:73.0331, Test Loss:71.8019
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 33.80it/s]
Epoch[11/15], Train Loss:71.2042, Test Loss:67.2791
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 33.94it/s]
Epoch[12/15], Train Loss:69.7242, Test Loss:65.7040
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.53it/s]
Epoch[13/15], Train Loss:68.2622, Test Loss:66.0965
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 33.78it/s]
Epoch[14/15], Train Loss:67.3484, Test Loss:63.5493
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.27it/s]
Epoch[15/15], Train Loss:65.7448, Test Loss:63.3813 Running Exp 12: angles=45, crop=0.6 Exp 12: Generating dataset from transform
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.51it/s]
Epoch[1/15], Train Loss:226.9495, Test Loss:134.7430
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.19it/s]
Epoch[2/15], Train Loss:103.8962, Test Loss:86.7670
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 32.64it/s]
Epoch[3/15], Train Loss:78.9248, Test Loss:73.6757
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.18it/s]
Epoch[4/15], Train Loss:69.2493, Test Loss:62.8773
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.44it/s]
Epoch[5/15], Train Loss:63.8473, Test Loss:59.3184
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.51it/s]
Epoch[6/15], Train Loss:59.5843, Test Loss:57.7639
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.20it/s]
Epoch[7/15], Train Loss:57.3130, Test Loss:53.4595
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.51it/s]
Epoch[8/15], Train Loss:55.1684, Test Loss:56.0973
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.07it/s]
Epoch[9/15], Train Loss:53.5153, Test Loss:49.1163
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.25it/s]
Epoch[10/15], Train Loss:51.8342, Test Loss:51.3045
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.20it/s]
Epoch[11/15], Train Loss:50.2519, Test Loss:47.4384
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.56it/s]
Epoch[12/15], Train Loss:50.0776, Test Loss:51.1840
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.34it/s]
Epoch[13/15], Train Loss:49.2334, Test Loss:50.8763
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.00it/s]
Epoch[14/15], Train Loss:48.0142, Test Loss:45.4874
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.18it/s]
Epoch[15/15], Train Loss:47.4366, Test Loss:44.7661 Running Exp 13: angles=60, crop=0.08 Exp 13: Generating dataset from transform
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.40it/s]
Epoch[1/15], Train Loss:273.2648, Test Loss:235.4841
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.34it/s]
Epoch[2/15], Train Loss:206.2288, Test Loss:181.0198
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.57it/s]
Epoch[3/15], Train Loss:173.6281, Test Loss:160.3423
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.70it/s]
Epoch[4/15], Train Loss:159.1017, Test Loss:153.0853
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 33.73it/s]
Epoch[5/15], Train Loss:152.0133, Test Loss:144.5928
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.28it/s]
Epoch[6/15], Train Loss:145.2907, Test Loss:138.4738
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.31it/s]
Epoch[7/15], Train Loss:140.1987, Test Loss:135.6789
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.23it/s]
Epoch[8/15], Train Loss:136.7185, Test Loss:131.8453
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 33.71it/s]
Epoch[9/15], Train Loss:133.9563, Test Loss:127.4475
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.68it/s]
Epoch[10/15], Train Loss:132.8140, Test Loss:126.8887
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.28it/s]
Epoch[11/15], Train Loss:130.3398, Test Loss:125.1631
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.38it/s]
Epoch[12/15], Train Loss:128.8992, Test Loss:126.6912
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 35.27it/s]
Epoch[13/15], Train Loss:127.5985, Test Loss:121.2971
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.50it/s]
Epoch[14/15], Train Loss:126.0572, Test Loss:120.9271
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.09it/s]
Epoch[15/15], Train Loss:125.3362, Test Loss:124.7142 Running Exp 14: angles=60, crop=0.24 Exp 14: Generating dataset from transform
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.52it/s]
Epoch[1/15], Train Loss:284.5449, Test Loss:265.0378
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.35it/s]
Epoch[2/15], Train Loss:211.8008, Test Loss:165.9173
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.58it/s]
Epoch[3/15], Train Loss:153.1203, Test Loss:140.1142
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 33.86it/s]
Epoch[4/15], Train Loss:132.2385, Test Loss:119.9374
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 33.74it/s]
Epoch[5/15], Train Loss:121.7565, Test Loss:114.4224
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.12it/s]
Epoch[6/15], Train Loss:114.9436, Test Loss:109.1825
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.64it/s]
Epoch[7/15], Train Loss:110.4517, Test Loss:107.1972
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.35it/s]
Epoch[8/15], Train Loss:106.2987, Test Loss:102.5439
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.35it/s]
Epoch[9/15], Train Loss:104.0969, Test Loss:97.5677
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.05it/s]
Epoch[10/15], Train Loss:102.5159, Test Loss:96.2565
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.28it/s]
Epoch[11/15], Train Loss:99.3443, Test Loss:94.3654
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.45it/s]
Epoch[12/15], Train Loss:97.2308, Test Loss:92.8501
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.50it/s]
Epoch[13/15], Train Loss:96.5124, Test Loss:92.4623
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.37it/s]
Epoch[14/15], Train Loss:95.4770, Test Loss:90.7161
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.32it/s]
Epoch[15/15], Train Loss:94.7934, Test Loss:88.6764 Running Exp 15: angles=60, crop=0.4 Exp 15: Generating dataset from transform
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 32.97it/s]
Epoch[1/15], Train Loss:261.4127, Test Loss:193.2169
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.34it/s]
Epoch[2/15], Train Loss:147.6261, Test Loss:123.9893
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.30it/s]
Epoch[3/15], Train Loss:108.7168, Test Loss:96.3248
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 33.73it/s]
Epoch[4/15], Train Loss:96.2003, Test Loss:84.7912
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.40it/s]
Epoch[5/15], Train Loss:88.2074, Test Loss:81.4018
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 32.86it/s]
Epoch[6/15], Train Loss:83.7404, Test Loss:77.4003
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.61it/s]
Epoch[7/15], Train Loss:80.7069, Test Loss:71.9837
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.97it/s]
Epoch[8/15], Train Loss:78.3637, Test Loss:70.5774
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 33.76it/s]
Epoch[9/15], Train Loss:75.4578, Test Loss:67.7206
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 32.85it/s]
Epoch[10/15], Train Loss:74.1966, Test Loss:68.4628
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 33.95it/s]
Epoch[11/15], Train Loss:72.2177, Test Loss:69.9053
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.08it/s]
Epoch[12/15], Train Loss:71.3242, Test Loss:67.4168
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.56it/s]
Epoch[13/15], Train Loss:70.2998, Test Loss:63.9883
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.53it/s]
Epoch[14/15], Train Loss:69.3350, Test Loss:64.8003
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.24it/s]
Epoch[15/15], Train Loss:67.6273, Test Loss:61.4801 Running Exp 16: angles=60, crop=0.6 Exp 16: Generating dataset from transform
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.36it/s]
Epoch[1/15], Train Loss:237.8173, Test Loss:148.2641
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.21it/s]
Epoch[2/15], Train Loss:116.4845, Test Loss:95.8477
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 32.99it/s]
Epoch[3/15], Train Loss:88.4812, Test Loss:80.6348
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 32.86it/s]
Epoch[4/15], Train Loss:77.7235, Test Loss:72.0927
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.15it/s]
Epoch[5/15], Train Loss:71.3916, Test Loss:66.8190
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.48it/s]
Epoch[6/15], Train Loss:67.1890, Test Loss:65.6475
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.05it/s]
Epoch[7/15], Train Loss:63.8747, Test Loss:58.7209
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 33.87it/s]
Epoch[8/15], Train Loss:61.3178, Test Loss:60.1422
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:15<00:00, 36.52it/s]
Epoch[9/15], Train Loss:59.5554, Test Loss:56.2422
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 33.78it/s]
Epoch[10/15], Train Loss:58.0028, Test Loss:54.6090
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.51it/s]
Epoch[11/15], Train Loss:56.2025, Test Loss:55.5614
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.42it/s]
Epoch[12/15], Train Loss:55.2669, Test Loss:54.2610
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 33.77it/s]
Epoch[13/15], Train Loss:54.0181, Test Loss:53.1277
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 33.86it/s]
Epoch[14/15], Train Loss:53.5266, Test Loss:52.1733
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 33.87it/s]
Epoch[15/15], Train Loss:52.2984, Test Loss:55.0236
# Reload the saved Experiment 3.1 sweep results (a pickled results object, not a
# bare state_dict), then recompute metrics over the angle/crop hyperparameter grid.
# weights_only=False is passed explicitly: the checkpoint was produced by this
# notebook (trusted source), and it contains arbitrary Python objects that the
# weights-only unpickler would reject. Making it explicit also silences the
# FutureWarning about the default flipping to weights_only=True in a future
# PyTorch release.
exp3_1_loaded = torch.load("./models/exp3_1_1730131987195526.pth", weights_only=False)
exp3_1_results = get_experiment_results(exp3_1_loaded, test_hyperparam_names=["angle", "crop"], extra_loader=exp2_extra_loader)
D:\Temps\temp\ipykernel_68752\2438005985.py:1: FutureWarning: You are using `torch.load` with `weights_only=False` (the current default value), which uses the default pickle module implicitly. It is possible to construct malicious pickle data which will execute arbitrary code during unpickling (See https://github.com/pytorch/pytorch/blob/main/SECURITY.md#untrusted-models for more details). In a future release, the default value for `weights_only` will be flipped to `True`. This limits the functions that could be executed during unpickling. Arbitrary objects will no longer be allowed to be loaded via this mode unless they are explicitly allowlisted by the user via `torch.serialization.add_safe_globals`. We recommend you start setting `weights_only=True` for any use case where you don't have full control of the loaded file. Please open an issue on GitHub for any issues related to this experimental feature.
exp3_1_loaded = torch.load("./models/exp3_1_1730131987195526.pth")
100%|████████████████████████████████████████████████████████████████████████████████| 235/235 [00:06<00:00, 34.38it/s]
First 10 true labels: 4 7 8 7 1 1 7 4 3 0 ... First 10 pred labels: 4 1 8 7 1 7 7 7 3 7 ... First 5 pred_scores: [1.4064831077575946e-07, 0.00011279848695266992, 1.5159866961766966e-05, 1.7165482404379873e-06, 0.9998559951782227, 2.1228647710813675e-06, 1.3362932804739103e-06, 7.832702067389619e-06, 1.4053844097361434e-06, 1.5434172837558435e-06] [0.03180306404829025, 0.4185839295387268, 0.02551024965941906, 0.03235312178730965, 0.1896570324897766, 0.00957075972110033, 0.006793879438191652, 0.2431216537952423, 0.009060983546078205, 0.0335453525185585] [2.112846075874586e-08, 4.6940115794313897e-07, 1.3628233546114643e-06, 8.282203634735197e-05, 8.77164207935266e-09, 1.4058948636375135e-06, 2.3308601157623343e-05, 1.5347421822298202e-06, 0.9998881816864014, 8.895452197066334e-07] [3.6493094768275114e-08, 1.460717385270982e-06, 2.4321957425854634e-06, 1.2626055649889167e-06, 2.2122497966847732e-07, 4.538042830404265e-09, 4.3692324425137485e-07, 0.9999939203262329, 5.015929716023493e-08, 3.6751121257339037e-08] [0.018730266019701958, 0.7045648694038391, 0.00017669936642050743, 0.0015132176922634244, 0.0020648883655667305, 7.167350850068033e-05, 9.595266601536423e-05, 0.2720440924167633, 0.0005919582326896489, 0.00014636351261287928] ...
100%|████████████████████████████████████████████████████████████████████████████████| 235/235 [00:06<00:00, 34.36it/s]
First 10 true labels: 4 7 8 7 1 1 7 4 3 0 ... First 10 pred labels: 4 7 8 4 1 7 1 7 3 0 ... First 5 pred_scores: [2.0182224034215324e-05, 0.005355987697839737, 0.0007133720209822059, 0.0003770111652556807, 0.9903033375740051, 5.057433736510575e-05, 3.956997170462273e-05, 0.003078151261433959, 1.4097527127887588e-05, 4.7732180973980576e-05] [0.02147713303565979, 0.3031976819038391, 0.053317200392484665, 0.049746885895729065, 0.03985697776079178, 0.023932544514536858, 0.08461983501911163, 0.37980082631111145, 0.03162527456879616, 0.012425631284713745] [2.043546010099817e-05, 7.556287164334208e-05, 0.0016411993419751525, 0.00032324803760275245, 4.3842073864652775e-06, 4.808251833310351e-05, 0.00015477798297069967, 1.0236520211037714e-05, 0.9973805546760559, 0.00034151485306210816] [0.00569797633215785, 0.12763331830501556, 0.0003282544494140893, 0.0019110975554212928, 0.7916438579559326, 0.00014230738452170044, 0.024179695174098015, 0.046430524438619614, 0.0014189337380230427, 0.0006140433833934367] [0.0009190419805236161, 0.9700970649719238, 0.0002922282146755606, 0.00023085846623871475, 0.010859143920242786, 8.526008605258539e-05, 3.0573060939786956e-05, 0.017066553235054016, 4.434372385730967e-05, 0.0003748943272512406] ...
100%|████████████████████████████████████████████████████████████████████████████████| 235/235 [00:06<00:00, 34.97it/s]
First 10 true labels: 4 7 8 7 1 1 7 4 3 0 ... First 10 pred labels: 4 1 8 7 1 1 1 1 3 0 ... First 5 pred_scores: [5.525651886273408e-06, 0.003244614228606224, 0.0006503831245936453, 5.366866025724448e-05, 0.9957312941551208, 4.878462277702056e-05, 9.872703230939806e-05, 3.6178047594148666e-05, 0.00011152042861795053, 1.9343691747053526e-05] [0.01701025292277336, 0.4711743891239166, 0.0068757557310163975, 0.0065680695697665215, 0.01812058500945568, 0.001882858807221055, 0.0036758719943463802, 0.46403390169143677, 0.0017211936647072434, 0.008937081322073936] [0.016106002032756805, 0.0009662715019658208, 0.04833371937274933, 0.00842353142797947, 0.005780481733381748, 0.039252474904060364, 0.053723789751529694, 0.00040336622623726726, 0.5178942680358887, 0.3091161251068115] [3.204526421995979e-08, 0.0062753139063715935, 1.4809371350565925e-05, 1.7198237401316874e-05, 5.7432793255429715e-05, 1.1583284731386811e-07, 1.5591742339893244e-05, 0.9936178922653198, 4.7079484488676826e-07, 1.1559553740880801e-06] [0.00037273240741342306, 0.9900857210159302, 2.1628995455102995e-05, 0.0003162486827932298, 0.00042396553908474743, 0.0002078549878206104, 0.0002391721063759178, 0.008045517839491367, 0.0002759082999546081, 1.137711024057353e-05] ...
100%|████████████████████████████████████████████████████████████████████████████████| 235/235 [00:06<00:00, 36.81it/s]
First 10 true labels: 4 7 8 7 1 1 7 4 3 0 ... First 10 pred labels: 4 1 8 7 1 7 1 1 2 0 ... First 5 pred_scores: [2.9765080853394466e-06, 0.0005042628035880625, 0.0030127926729619503, 7.534459291491657e-05, 0.9960286617279053, 6.1026683397358283e-05, 0.00020444155961740762, 8.515617082593963e-05, 8.772012733970769e-06, 1.653061553952284e-05] [0.033330705016851425, 0.49084827303886414, 0.029589412733912468, 0.03180508688092232, 0.002717816736549139, 0.0004189242608845234, 0.007390279322862625, 0.31179216504096985, 0.09107621759176254, 0.0010310987709090114] [6.02355066803284e-06, 0.0006331368931569159, 0.004683933686465025, 0.000261747365584597, 1.727759990899358e-05, 1.0070411008200608e-05, 0.0003484832704998553, 0.00017784489318728447, 0.9937586784362793, 0.00010274533269694075] [0.0024907132610678673, 0.004774706903845072, 2.4819670215947554e-05, 0.00017629892681725323, 0.0003953950945287943, 6.228317488421453e-06, 0.0453050434589386, 0.946723222732544, 4.8757447075331584e-05, 5.4831594752613455e-05] [1.221009915752802e-05, 0.9993798732757568, 4.121716301597189e-06, 8.197375427698717e-05, 0.00018101245223078877, 6.163662328617647e-05, 0.00010631871555233374, 0.00014360524073708802, 2.67024188360665e-05, 2.5714155071909772e-06] ...
100%|████████████████████████████████████████████████████████████████████████████████| 235/235 [00:06<00:00, 33.91it/s]
First 10 true labels: 4 7 8 7 1 1 7 4 3 0 ... First 10 pred labels: 4 1 8 7 7 1 7 4 3 1 ... First 5 pred_scores: [0.00014887879660818726, 0.020086141303181648, 0.011565621942281723, 0.003160470863804221, 0.9609534740447998, 0.0010658403625711799, 0.00040785325109027326, 0.002180159091949463, 0.00010687166650313884, 0.0003246604755986482] [0.003914410714060068, 0.4775712490081787, 0.1502229869365692, 0.020113371312618256, 0.018521776422858238, 0.01615132763981819, 0.016511449590325356, 0.25544029474258423, 0.026133766397833824, 0.015419360250234604] [0.05729537084698677, 0.0029975799843668938, 0.02326829545199871, 0.008132182992994785, 0.0016669145552441478, 0.004738755989819765, 0.11119315773248672, 0.0024263006635010242, 0.6793326139450073, 0.10894875228404999] [3.629895273249417e-09, 1.0017990916821873e-06, 6.597811630371098e-09, 4.013194221386129e-09, 4.4059028425635915e-08, 5.019764118263659e-12, 2.4533416964622745e-10, 0.9999990463256836, 3.317964306082111e-11, 2.7112856404443164e-10] [1.1688735867210198e-05, 0.08758752048015594, 6.902387667651055e-06, 3.2633583032293245e-05, 0.0011748791439458728, 8.984567330116988e-07, 6.587982170458417e-06, 0.9111714363098145, 4.463134700927185e-06, 3.0956537102611037e-06] ...
100%|████████████████████████████████████████████████████████████████████████████████| 235/235 [00:06<00:00, 34.02it/s]
First 10 true labels: 4 7 8 7 1 1 7 4 3 0 ... First 10 pred labels: 4 1 8 7 7 1 1 2 3 0 ... First 5 pred_scores: [2.6605306629789993e-05, 0.007838696241378784, 0.0012515035923570395, 0.00027857604436576366, 0.9897240400314331, 7.617428491357714e-05, 0.0001676578540354967, 0.00042830308666452765, 8.684688509674743e-05, 0.00012159889593021944] [0.06559410691261292, 0.6046677827835083, 0.04208051785826683, 0.006063533015549183, 0.03709845617413521, 0.00017419022333342582, 0.0004996178904548287, 0.012579692527651787, 0.005023436155170202, 0.22621864080429077] [0.06756053864955902, 0.0018873936496675014, 0.005447923205792904, 0.026589632034301758, 0.00012986130604986101, 0.006430545821785927, 0.40858590602874756, 0.0008083711145445704, 0.47606992721557617, 0.006489936728030443] [0.00012178789620520547, 0.005481535103172064, 1.5283892935258336e-05, 1.7552774806972593e-05, 0.004609161056578159, 2.8131612594961553e-08, 2.5852618819044437e-06, 0.9896793365478516, 5.167023846297525e-05, 2.1127949366928078e-05] [1.1765096132876351e-05, 0.002030162373557687, 5.041058102506213e-05, 7.005095085332869e-06, 1.8344417185289785e-05, 1.7659797890701157e-07, 3.897131136909593e-06, 0.9978744983673096, 1.426552216798882e-06, 2.215788526882534e-06] ...
100%|████████████████████████████████████████████████████████████████████████████████| 235/235 [00:06<00:00, 34.62it/s]
First 10 true labels: 4 7 8 7 1 1 7 4 3 0 ... First 10 pred labels: 4 4 8 7 1 7 7 7 3 0 ... First 5 pred_scores: [0.0016687986208125949, 0.006634272634983063, 0.04617737606167793, 0.0023353206925094128, 0.924431324005127, 0.004753665998578072, 0.0016811992973089218, 0.0023135216906666756, 0.0014599967980757356, 0.008544554002583027] [0.015870150178670883, 0.24156060814857483, 0.003656246932223439, 0.005719190929085016, 0.6949741840362549, 0.0017287037335336208, 0.004464574158191681, 0.023400265723466873, 0.0032486431300640106, 0.005377477500587702] [1.2414210770472778e-09, 6.981086926316493e-07, 8.345858987013344e-06, 0.00011871085735037923, 6.18854940626079e-08, 1.9638156345536117e-07, 7.898189323896077e-06, 1.4957272753690631e-07, 0.9998612403869629, 2.621061639729305e-06] [0.06452324986457825, 0.013112373650074005, 0.0006105300853960216, 0.00017294920689892024, 0.2535353899002075, 8.497393650941376e-07, 0.0020163438748568296, 0.6645770072937012, 0.0012950684176757932, 0.0001562304823892191] [0.21114514768123627, 0.41046321392059326, 0.001620753319002688, 0.0097266910597682, 0.0019190856255590916, 0.0016925844829529524, 0.06613646447658539, 0.2771311104297638, 0.00973300077021122, 0.010432039387524128] ...
100%|████████████████████████████████████████████████████████████████████████████████| 235/235 [00:06<00:00, 34.56it/s]
First 10 true labels: 4 7 8 7 1 1 7 4 3 0 ... First 10 pred labels: 2 7 6 7 1 7 7 7 3 0 ... First 5 pred_scores: [6.483113975264132e-05, 0.02617633156478405, 0.9114415049552917, 0.0048982915468513966, 0.019094396382570267, 0.0018359959358349442, 0.0004954489995725453, 0.033507540822029114, 0.001439186162315309, 0.0010464631486684084] [6.781506090192124e-05, 0.04971254616975784, 0.019052455201745033, 0.00012034989049425349, 0.0002869169693440199, 2.4602153644082136e-05, 2.71711378445616e-05, 0.9303671717643738, 0.00010949916759273037, 0.00023157772375270724] [0.030535826459527016, 0.00065958546474576, 0.001014184090308845, 0.00011645971244433895, 3.423498128540814e-05, 0.001118923770263791, 0.7835494875907898, 3.355018998263404e-05, 0.1714164763689041, 0.01152122113853693] [7.224221221235894e-09, 0.0007657785899937153, 1.1043717904613004e-06, 1.3739162341153133e-06, 6.2994226937007625e-06, 3.475807375252771e-08, 1.0200637916568667e-06, 0.9992243051528931, 1.068012789318118e-08, 8.387583072533289e-09] [0.00020628665515687317, 0.8470728993415833, 9.706459968583658e-05, 0.00010419034515507519, 0.00018019380513578653, 3.0294975204014918e-06, 0.00029129546601325274, 0.15194685757160187, 6.82285099173896e-05, 3.0015118682058528e-05] ...
100%|████████████████████████████████████████████████████████████████████████████████| 235/235 [00:05<00:00, 42.58it/s]
First 10 true labels: 4 7 8 7 1 1 7 4 3 0 ... First 10 pred labels: 4 1 2 7 7 1 7 4 3 0 ... First 5 pred_scores: [0.0002249570970889181, 0.022906742990016937, 0.0177496038377285, 0.005723132751882076, 0.9360774159431458, 0.00834350474178791, 0.003402945352718234, 0.0024503683671355247, 0.00037870806409046054, 0.002742679789662361] [0.09864036738872528, 0.22107474505901337, 0.05844929814338684, 0.13654352724552155, 0.05616621673107147, 0.11108417063951492, 0.043080925941467285, 0.14052534103393555, 0.056810300797224045, 0.07762514799833298] [0.0018288862193003297, 0.003374028019607067, 0.5254546999931335, 0.053928181529045105, 0.0009635789319872856, 0.002637851983308792, 0.003555990755558014, 0.0013339836150407791, 0.2591773569583893, 0.14774544537067413] [0.0007197080412879586, 0.006538832560181618, 0.002656287048012018, 0.005491473712027073, 0.017662789672613144, 0.00039417773950845003, 0.002520319540053606, 0.9633978009223938, 0.00047632778296247125, 0.0001423502544639632] [0.0002248297241749242, 0.05915294960141182, 0.001413652440533042, 6.0287398810032755e-05, 0.03689972311258316, 6.467631465056911e-06, 5.460098691401072e-06, 0.9019597172737122, 4.0819228161126375e-05, 0.00023603875888511539] ...
100%|████████████████████████████████████████████████████████████████████████████████| 235/235 [00:06<00:00, 35.42it/s]
First 10 true labels: 4 7 8 7 1 1 7 4 3 0 ... First 10 pred labels: 4 2 8 7 7 7 1 1 5 0 ... First 5 pred_scores: [2.914589636304754e-08, 0.0008508206810802221, 0.0008722578641027212, 1.9891698684659787e-05, 0.9982244372367859, 7.406957593047991e-06, 9.014515853777993e-07, 2.179508373956196e-05, 2.092263002850814e-06, 2.89908115291837e-07] [0.01915029063820839, 0.15890544652938843, 0.27269643545150757, 0.11073502153158188, 0.012512916699051857, 0.003739676671102643, 0.0031804246827960014, 0.2051072120666504, 0.10435207188129425, 0.1096205785870552] [0.00016811910609249026, 4.780886956723407e-05, 0.00024119805311784148, 0.0001072921950253658, 6.001271799505048e-07, 7.689437916269526e-05, 0.03252856060862541, 2.8312719223322347e-05, 0.9666798710823059, 0.00012140339094912633] [0.00852180551737547, 0.26356595754623413, 0.12366767972707748, 0.05908928066492081, 0.006280963774770498, 0.011168583296239376, 0.0008806203259155154, 0.5154536962509155, 0.0004714576352853328, 0.010899939574301243] [0.000805130519438535, 0.006617440842092037, 1.809623427106999e-05, 0.00014029898738954216, 0.0008574273088015616, 1.1444323718023952e-05, 2.350240265514003e-06, 0.9915423393249512, 1.4021791230334202e-06, 4.035363417642657e-06] ...
100%|████████████████████████████████████████████████████████████████████████████████| 235/235 [00:06<00:00, 38.28it/s]
First 10 true labels: 4 7 8 7 1 1 7 4 3 0 ... First 10 pred labels: 4 7 8 7 1 7 7 7 3 0 ... First 5 pred_scores: [4.594157144310884e-05, 0.001574822934344411, 0.00820460170507431, 0.0002731005661189556, 0.9858530759811401, 9.52748887357302e-05, 0.0003614446031861007, 0.0031347782351076603, 0.00018817620002664626, 0.0002688080712687224] [0.24698501825332642, 0.08505468815565109, 0.015969837084412575, 0.00260521681047976, 0.03520243987441063, 0.0004840922192670405, 0.0067552984692156315, 0.467732697725296, 0.0017173775704577565, 0.13749335706233978] [1.0030299790741992e-06, 0.00040675379568710923, 0.02783540077507496, 4.2046274757012725e-05, 0.0014531518099829555, 3.3120231819339097e-05, 1.5285610061255284e-05, 5.2764808060601354e-05, 0.9670482873916626, 0.003112190403044224] [0.00010158284567296505, 0.11298731714487076, 0.07357220351696014, 0.049686823040246964, 0.0020438535138964653, 0.0015538351144641638, 0.0005543519509956241, 0.7585766911506653, 0.0004713288217317313, 0.00045199348824098706] [0.001030503655783832, 0.96080082654953, 8.824739779811352e-06, 1.8796409904098255e-06, 0.020350879058241844, 2.517853545214166e-06, 0.003017381764948368, 0.014618627727031708, 0.0001583074772497639, 1.0269986887578852e-05] ...
100%|████████████████████████████████████████████████████████████████████████████████| 235/235 [00:06<00:00, 36.96it/s]
First 10 true labels: 4 7 8 7 1 1 7 4 3 0 ... First 10 pred labels: 3 4 8 4 1 1 1 1 3 0 ... First 5 pred_scores: [0.010088158771395683, 0.017085710540413857, 0.019995873793959618, 0.7594417929649353, 0.08855155110359192, 0.009988616220653057, 0.025506166741251945, 0.031804900616407394, 0.034476809203624725, 0.0030604496132582426] [0.023052792996168137, 0.16782692074775696, 0.0645916536450386, 0.0951981320977211, 0.2453615963459015, 0.014951322227716446, 0.05500718206167221, 0.2431851178407669, 0.04742629826068878, 0.043398965150117874] [2.465177658450557e-06, 0.0001637720997678116, 2.6346011509303935e-05, 0.0016450828406959772, 9.933350156643428e-06, 0.000577703642193228, 0.003910087049007416, 2.0759760445798747e-05, 0.9935279488563538, 0.00011590111535042524] [0.024826372042298317, 0.1450871080160141, 0.014209969900548458, 0.0024392344057559967, 0.445220410823822, 0.0013687157770618796, 0.22597244381904602, 0.10563267767429352, 0.0322679802775383, 0.002975068986415863] [0.008395561948418617, 0.6973749995231628, 0.008029806427657604, 0.007045292761176825, 0.05737769976258278, 0.0008125067688524723, 0.0018332094186916947, 0.21507421135902405, 0.0029219589196145535, 0.0011346905957907438] ...
100%|████████████████████████████████████████████████████████████████████████████████| 235/235 [00:06<00:00, 38.28it/s]
First 10 true labels: 4 7 8 7 1 1 7 4 3 0 ... First 10 pred labels: 4 1 8 1 1 1 1 1 3 0 ... First 5 pred_scores: [3.4999986382899806e-05, 0.0068273949436843395, 0.0008238906157203019, 0.00023115549993235618, 0.9782301187515259, 1.4796582945564296e-05, 1.7093050701078027e-05, 0.013350083492696285, 0.00010113201278727502, 0.00036935799289494753] [0.1427883803844452, 0.45712172985076904, 0.046888697892427444, 0.0385059118270874, 0.09939292073249817, 0.023047346621751785, 0.045982327312231064, 0.07252300530672073, 0.033627573400735855, 0.04012211784720421] [6.377521799549868e-07, 2.607579745017574e-06, 0.0014857691712677479, 0.0004436323943082243, 1.2488598599702527e-07, 5.777782234872575e-07, 2.3254251573234797e-05, 2.039447934976124e-07, 0.9979573488235474, 8.582761802244931e-05] [0.03747524693608284, 0.49025556445121765, 0.057948220521211624, 0.020853472873568535, 0.18975523114204407, 0.020639343187212944, 0.06852464377880096, 0.08708591759204865, 0.019178146496415138, 0.008284250274300575] [0.0005220513558015227, 0.9341861009597778, 0.0014284016797319055, 0.00011810748401330784, 0.005209850147366524, 1.7124326404882595e-05, 4.992174945073202e-05, 0.05819239094853401, 0.00015895084652584046, 0.00011719060421455652] ...
100%|████████████████████████████████████████████████████████████████████████████████| 235/235 [00:06<00:00, 35.27it/s]
First 10 true labels: 4 7 8 7 1 1 7 4 3 0 ... First 10 pred labels: 3 7 8 7 7 7 7 7 3 0 ... First 5 pred_scores: [0.0012721659149974585, 0.028686663135886192, 0.010021635331213474, 0.8651491403579712, 0.02601820044219494, 0.041853997856378555, 0.005926155485212803, 0.01187352929264307, 0.007987547665834427, 0.001210951479151845] [0.006480725482106209, 0.06773257255554199, 0.03989848867058754, 0.08859630674123764, 0.007634063716977835, 0.0004091088194400072, 0.00048807915300130844, 0.7718868255615234, 0.003942753653973341, 0.012931084260344505] [0.09364408999681473, 7.57426314521581e-05, 0.08270396292209625, 0.00650341622531414, 5.352727748686448e-05, 0.00015678202908020467, 0.02924981713294983, 0.00022503397485706955, 0.46626099944114685, 0.3211267292499542] [0.2222094088792801, 0.0800739973783493, 0.0015906542539596558, 0.00015329226152971387, 0.045525308698415756, 9.151661288342439e-06, 0.007446009200066328, 0.6417227387428284, 0.0005016883369535208, 0.0007677071844227612] [7.492931035812944e-05, 0.09766610711812973, 1.2138303645770065e-05, 3.630307037383318e-05, 0.0058419425040483475, 1.7990382730204146e-06, 6.443746769946301e-06, 0.8963307738304138, 3.0804239941062406e-06, 2.6456977138877846e-05] ...
100%|████████████████████████████████████████████████████████████████████████████████| 235/235 [00:06<00:00, 34.78it/s]
First 10 true labels: 4 7 8 7 1 1 7 4 3 0 ... First 10 pred labels: 4 1 8 7 7 1 7 1 3 0 ... First 5 pred_scores: [0.0001420140906702727, 0.00713851023465395, 0.003791375085711479, 0.005049004219472408, 0.9780939221382141, 0.002496036933735013, 0.0004309658834245056, 0.0009765718132257462, 0.0005876566283404827, 0.0012939583975821733] [0.025841131806373596, 0.6686941385269165, 0.004654822405427694, 0.0009682995732873678, 0.04681430384516716, 0.00045565434265881777, 0.006434406153857708, 0.23315496742725372, 0.0014618063578382134, 0.011520527303218842] [4.902361183667381e-07, 1.851441652434005e-06, 3.840727458737092e-06, 1.549099397379905e-05, 5.313769868564577e-09, 8.401132163271541e-07, 0.0015765554271638393, 4.408968550251302e-07, 0.998396098613739, 4.318570972827729e-06] [1.062832222231691e-08, 1.7454757426094147e-06, 1.702081817711587e-06, 3.2365063962203067e-09, 5.650847469951259e-07, 2.656726783012431e-10, 1.749733229416961e-07, 0.9999957084655762, 3.1697080515868947e-09, 1.7473993085825867e-10] [0.060522302985191345, 0.20429813861846924, 0.0033803186379373074, 0.0011707558296620846, 0.06632325053215027, 0.0002731532440520823, 0.024951128289103508, 0.6370859146118164, 0.0017485168064013124, 0.0002465782454237342] ...
100%|████████████████████████████████████████████████████████████████████████████████| 235/235 [00:06<00:00, 34.02it/s]
First 10 true labels: 4 7 8 7 1 1 7 4 3 0 ... First 10 pred labels: 3 2 8 4 1 1 7 1 3 0 ... First 5 pred_scores: [0.0044592441990971565, 0.042769648134708405, 0.017423609271645546, 0.48083269596099854, 0.2857661545276642, 0.016019774600863457, 0.0024708809796720743, 0.09462128579616547, 0.04565499350428581, 0.009981700219213963] [0.007901892066001892, 0.11065695434808731, 0.33118826150894165, 0.01720629818737507, 0.02725224755704403, 0.0024205881636589766, 0.0015218387125059962, 0.1989704817533493, 0.008079438470304012, 0.2948019206523895] [2.154678213628358e-06, 9.494098662798933e-07, 0.0008331053541041911, 0.0007748861680738628, 4.600620741257444e-06, 1.1393633485567989e-06, 1.546573366795201e-05, 4.373363537979458e-07, 0.9982931017875671, 7.40791583666578e-05] [0.08549793809652328, 0.037647660821676254, 0.015229734592139721, 0.005118135828524828, 0.7212458848953247, 0.0011257296428084373, 0.052018772810697556, 0.059428323060274124, 0.013863448984920979, 0.008824433200061321] [0.0005010085296817124, 0.7530115842819214, 0.0011468544835224748, 0.002201158320531249, 0.0027488265186548233, 0.0003672640596050769, 0.00038091265014372766, 0.2349562793970108, 0.0033517838455736637, 0.0013344050385057926] ...
# Visualize Experiment 3-1 over the 4x4 grid of (angle, crop) combinations:
# epoch-loss curves, confusion matrices, then precision-recall curves.
plot_el(exp3_1_loaded, ["angle", "crop"], n_rows=4, n_cols=4)
plot_cm(exp3_1_results, ["angle", "crop"], n_rows=4, n_cols=4)
exp1_accuracies, exp1_f1s = plot_pr(exp3_1_results, ["angle", "crop"], n_rows=4, n_cols=4)

# Dump per-run accuracies, comma-separated on a single line.
print("Accuracies:")
print("".join(f"{acc:.3f}, " for acc in exp1_accuracies), end="")
print("\n")

# For each run: the 10 per-class F1 scores, then their mean.
print("F1 Score Lists:")
for f1_scores in exp1_f1s:
    print("".join(f"{score:.3f}, " for score in f1_scores), end="")
    print(f"Avg F1={np.mean(f1_scores):.3f}")
Accuracies: 0.747, 0.750, 0.719, 0.675, 0.762, 0.763, 0.736, 0.675, 0.766, 0.759, 0.723, 0.672, 0.753, 0.746, 0.730, 0.662, F1 Score Lists: 0.773, 0.772, 0.783, 0.732, 0.747, 0.729, 0.726, 0.693, 0.729, 0.737, Avg F1=0.742 0.751, 0.757, 0.776, 0.751, 0.725, 0.775, 0.734, 0.703, 0.741, 0.742, Avg F1=0.746 0.721, 0.723, 0.755, 0.730, 0.674, 0.738, 0.701, 0.695, 0.713, 0.703, Avg F1=0.715 0.678, 0.673, 0.715, 0.687, 0.625, 0.713, 0.648, 0.645, 0.671, 0.676, Avg F1=0.673 0.763, 0.769, 0.783, 0.767, 0.749, 0.792, 0.739, 0.744, 0.741, 0.727, Avg F1=0.758 0.764, 0.757, 0.799, 0.772, 0.755, 0.783, 0.744, 0.747, 0.751, 0.728, Avg F1=0.760 0.724, 0.708, 0.775, 0.747, 0.701, 0.782, 0.740, 0.723, 0.741, 0.737, Avg F1=0.738 0.678, 0.661, 0.689, 0.698, 0.654, 0.709, 0.665, 0.651, 0.682, 0.667, Avg F1=0.675 0.782, 0.787, 0.779, 0.756, 0.748, 0.776, 0.716, 0.764, 0.758, 0.750, Avg F1=0.762 0.770, 0.748, 0.794, 0.763, 0.735, 0.788, 0.731, 0.745, 0.743, 0.757, Avg F1=0.757 0.736, 0.706, 0.767, 0.736, 0.684, 0.747, 0.688, 0.710, 0.720, 0.747, Avg F1=0.724 0.668, 0.648, 0.713, 0.680, 0.626, 0.720, 0.664, 0.675, 0.676, 0.680, Avg F1=0.675 0.762, 0.763, 0.779, 0.738, 0.738, 0.763, 0.738, 0.748, 0.724, 0.740, Avg F1=0.749 0.746, 0.733, 0.788, 0.750, 0.716, 0.779, 0.724, 0.725, 0.754, 0.734, Avg F1=0.745 0.726, 0.712, 0.766, 0.755, 0.699, 0.752, 0.711, 0.711, 0.742, 0.733, Avg F1=0.731 0.645, 0.641, 0.708, 0.684, 0.610, 0.700, 0.651, 0.660, 0.665, 0.656, Avg F1=0.662
# ROC-AUC visualization for Experiment 3-1: first every per-class curve,
# then only the macro/micro-averaged curves, for the same 4x4 run grid.
plot_rocauc(exp3_1_results, ["angle", "crop"], curve_type="all", n_rows=4, n_cols=4)
plot_rocauc(exp3_1_results, ["angle", "crop"], curve_type="macro_micro", n_rows=4, n_cols=4)
3.3.2 Experiment 3-2: Ratios & Bias¶
# Fixed training hyper-parameters for Experiment 3-2. Augmentation settings
# (crop=0.08, angle=45) are frozen here while ratio/bias are swept below.
exp3_2_hyperparams = dict(
    num_epoch=15,
    lr=1e-3,
    criterion=nn.CrossEntropyLoss(),
    optimizer=optim.Adam,
    crop=0.08,
    angle=45,
)
# Group 2
# Sweep grid for Experiment 3-2: RandomResizedCrop aspect-ratio lower bounds
# and additive per-image channel biases (applied before normalization).
candidate_ratios = [0.25, 0.42, 0.58, 0.75]
candidate_channel_biases = [0, 32, 64, 128]
# Reference normalization statistics (RGB order).
TA_norm_mean: List[float] = [0.4377, 0.4438, 0.4728]
"""norm_mean prepared by TA of CISC-3024"""
TA_norm_std: List[float] = [0.1980, 0.2010, 0.1970]
"""norm_std prepared by TA of CISC-3024"""
# One (mean, std) pair per candidate bias (0, 32, 64, 128); run_exp3_2 picks
# the pair whose index matches the bias position so normalization follows
# the shifted pixel distribution.
FULL_BIAS_norm_mean: Tuple[List[float], List[float], List[float], List[float]] = (
    [0.43011287, 0.42947713, 0.44553235], [0.482172, 0.47879672, 0.49193108],
    [0.5299231, 0.52622163, 0.5367366], [0.57701516, 0.57407594, 0.5768523])
"""norm_mean of all pixel values in 3 mat files in folder "SVHN_mat" """
FULL_BIAS_norm_std: Tuple[List[float], List[float], List[float], List[float]] = (
    [0.1968019, 0.19883512, 0.19997141], [0.19747807, 0.19839704, 0.1997657],
    [0.2061324, 0.20671913, 0.20880404], [0.2491606, 0.24821031, 0.2517171])
"""norm_std of all pixel values in 3 mat files in folder "SVHN_mat" """
# NOTE(review): the 4th entries below look swapped — mean[3] (~0.25) and
# std[3] (~0.58) are out of line with the other entries and with the
# FULL_BIAS_* tables above. Confirm how these were computed before using
# the bias=128 slot of the TRAIN_BIAS tables.
TRAIN_BIAS_norm_mean: Tuple[List[float], List[float], List[float], List[float]] = (
    [0.4359728, 0.4420371, 0.47095722], [0.48945808, 0.49221805, 0.51708484],
    [0.5346211, 0.53527725, 0.55649185], [0.25027722, 0.25132284, 0.25705674])
"""norm_mean of only "train_32x32.mat" """
TRAIN_BIAS_norm_std: Tuple[List[float], List[float], List[float], List[float]] = (
    [0.19725639, 0.20023046, 0.1962663], [0.19868314, 0.20101954, 0.19824031],
    [0.207664, 0.20977464, 0.20946072], [0.5766456, 0.5753719, 0.5847802])
"""norm_std of only "train_32x32.mat" """
class AddBiasTransform:
    """Add a random integer bias to every pixel, wrapping around modulo 256.

    `bias` is either a single int (the offset is drawn uniformly from
    [0, bias]) or a (low, high) tuple (offset drawn from [low, high],
    both inclusive — `random.randint` semantics).
    """

    def __init__(self, bias: Union[int, Tuple[int, int]]) -> None:
        if isinstance(bias, tuple):
            self.bias1, self.bias2 = bias[0], bias[1]
        else:
            self.bias1, self.bias2 = 0, bias

    def __call__(self, img: np.ndarray) -> np.ndarray:
        original_dtype = img.dtype
        offset = random.randint(self.bias1, self.bias2)
        # Widen to int16 so the addition cannot overflow uint8 pixels,
        # wrap into [0, 255], then restore the caller's dtype.
        shifted = (img.astype(np.int16) + offset) % 256
        return shifted.astype(original_dtype)
def run_exp3_2(ratios, biases, hyper_params, train_dataset, test_dataset):
    """Train one SmallVGG per (ratio, bias) combination and collect results.

    Args:
        ratios: candidate aspect-ratio lower bounds for RandomResizedCrop
            (the upper bound is 1/ratio).
        biases: candidate channel-bias values for AddBiasTransform. The i-th
            bias uses the i-th FULL_BIAS_norm_mean/std pair, so
            len(biases) must not exceed len(FULL_BIAS_norm_mean).
        hyper_params: dict with keys 'num_epoch', 'lr', 'criterion',
            'optimizer', 'crop', 'angle'.
        train_dataset: training SVHNDataset; its `.transform` is overwritten
            for each run (side effect persists after return).
        test_dataset: test SVHNDataset; same `.transform` side effect.

    Returns:
        List of dicts, one per combination, each with 'ratio', 'bias',
        'train_losses', 'test_losses', and 'model_state_dict'.
    """
    combinations = list(itertools.product(ratios, biases))
    experiments = []
    for i, (ratio, bias) in enumerate(combinations):
        print(f"Running Exp {i+1}: ratio={ratio}, bias={bias}")
        this_model = SmallVGG().to(device)
        num_epochs = hyper_params['num_epoch']
        lr = hyper_params['lr']
        criterion = hyper_params['criterion']
        optimizer = hyper_params['optimizer'](this_model.parameters(), lr=lr)
        # `bias` cycles fastest in itertools.product(ratios, biases), so the
        # matching normalization stats sit at index i % len(biases).
        # (Was hard-coded `i % 4`, which silently broke for other list sizes.)
        stats_idx = i % len(biases)
        # Hoist the transform instance so it is built once per run, not once
        # per image inside the Lambda closure.
        bias_transform = AddBiasTransform(bias)
        this_transform = A.Compose([
            A.Lambda(image=lambda img, **kwargs: bias_transform(img)),  # Lambda customized transform block
            A.RandomResizedCrop(32, 32, scale=(hyper_params['crop'], 1.0), ratio=(ratio, (1.0 / ratio))),
            A.Rotate(limit=hyper_params['angle']),
            # BUGFIX: the keyword was misspelled 'meand'; albumentations warned
            # "Argument 'meand' is not valid and will be ignored" and silently
            # normalized with its default mean instead of the bias-matched one.
            A.Normalize(mean=FULL_BIAS_norm_mean[stats_idx], std=FULL_BIAS_norm_std[stats_idx]),
            ToTensorV2()
        ])
        train_dataset.transform = this_transform
        test_dataset.transform = this_transform
        train_loader = DataLoader(train_dataset, batch_size=128, shuffle=True)
        test_loader = DataLoader(test_dataset, batch_size=128, shuffle=False)
        train_losses, test_losses = train_and_evaluate(this_model,
                                                       train_loader,
                                                       test_loader,
                                                       criterion,
                                                       optimizer,
                                                       num_epochs)
        experiments.append({
            "ratio": ratio,
            "bias": bias,
            "train_losses": train_losses,
            "test_losses": test_losses,
            "model_state_dict": this_model.state_dict(),
        })
        # Drop per-run objects and release cached GPU memory before the next
        # combination.
        del this_model, criterion, optimizer
        del train_loader, test_loader, this_transform
        torch.cuda.empty_cache()
    return experiments
# Run the full ratio x bias sweep and persist the results. The filename is
# timestamped (time.time() with the decimal point stripped) so repeated runs
# never overwrite earlier checkpoints.
exp3_2 = run_exp3_2(candidate_ratios, candidate_channel_biases, exp3_2_hyperparams, exp3_train_dataset, exp3_test_dataset)
# ROBUSTNESS: torch.save raises if the target directory is missing.
os.makedirs("./models", exist_ok=True)
time_str = str(time.time()).replace(".", "")
torch.save(exp3_2, f"./models/exp3_2_{time_str}.pth")
D:\Temps\temp\ipykernel_91216\2610225581.py:15: UserWarning: Using lambda is incompatible with multiprocessing. Consider using regular functions or partial(). A.Lambda(image=lambda img, **kwargs: AddBiasTransform(bias)(img)), # Lambda customized transform block D:\Temps\temp\ipykernel_91216\2610225581.py:18: UserWarning: Argument 'meand' is not valid and will be ignored. A.Normalize(meand=FULL_BIAS_norm_mean[i % 4], std=FULL_BIAS_norm_std[i % 4]),
Running Exp 1: ratio=0.25, bias=0
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 32.47it/s]
Epoch[1/15], Train Loss:285.6010, Test Loss:279.3851
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:14<00:00, 38.39it/s]
Epoch[2/15], Train Loss:263.1288, Test Loss:239.4954
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.82it/s]
Epoch[3/15], Train Loss:227.2084, Test Loss:217.0606
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.75it/s]
Epoch[4/15], Train Loss:208.5853, Test Loss:198.5745
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 35.40it/s]
Epoch[5/15], Train Loss:199.0653, Test Loss:191.1926
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:15<00:00, 36.53it/s]
Epoch[6/15], Train Loss:192.6096, Test Loss:189.2662
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 41.36it/s]
Epoch[7/15], Train Loss:187.6228, Test Loss:181.0330
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 42.04it/s]
Epoch[8/15], Train Loss:184.7993, Test Loss:178.2949
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 41.89it/s]
Epoch[9/15], Train Loss:181.9867, Test Loss:178.4260
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 41.66it/s]
Epoch[10/15], Train Loss:180.1346, Test Loss:173.9601
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 41.65it/s]
Epoch[11/15], Train Loss:177.5119, Test Loss:173.5049
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 42.65it/s]
Epoch[12/15], Train Loss:175.3553, Test Loss:170.5024
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 42.24it/s]
Epoch[13/15], Train Loss:174.1597, Test Loss:169.7406
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 42.79it/s]
Epoch[14/15], Train Loss:172.7873, Test Loss:167.5622
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 42.35it/s]
Epoch[15/15], Train Loss:171.8026, Test Loss:168.5402 Running Exp 2: ratio=0.25, bias=32
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 43.26it/s]
Epoch[1/15], Train Loss:277.5726, Test Loss:262.8051
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 43.22it/s]
Epoch[2/15], Train Loss:246.7672, Test Loss:232.3377
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 41.73it/s]
Epoch[3/15], Train Loss:217.5564, Test Loss:205.3693
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 42.93it/s]
Epoch[4/15], Train Loss:202.3003, Test Loss:196.7972
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 42.62it/s]
Epoch[5/15], Train Loss:193.7929, Test Loss:189.2403
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 43.20it/s]
Epoch[6/15], Train Loss:187.1649, Test Loss:182.9470
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 43.13it/s]
Epoch[7/15], Train Loss:184.8341, Test Loss:179.5369
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 43.40it/s]
Epoch[8/15], Train Loss:181.0484, Test Loss:176.4138
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 43.51it/s]
Epoch[9/15], Train Loss:179.4008, Test Loss:174.5864
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 43.37it/s]
Epoch[10/15], Train Loss:177.0245, Test Loss:175.1944
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 43.43it/s]
Epoch[11/15], Train Loss:174.9093, Test Loss:169.4020
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 43.27it/s]
Epoch[12/15], Train Loss:171.7483, Test Loss:168.9148
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 42.82it/s]
Epoch[13/15], Train Loss:171.3499, Test Loss:166.0027
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 43.59it/s]
Epoch[14/15], Train Loss:170.6870, Test Loss:166.4546
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 43.01it/s]
Epoch[15/15], Train Loss:168.9554, Test Loss:164.7334 Running Exp 3: ratio=0.25, bias=64
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 43.12it/s]
Epoch[1/15], Train Loss:285.5163, Test Loss:274.5473
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 43.70it/s]
Epoch[2/15], Train Loss:259.3058, Test Loss:239.5582
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 43.28it/s]
Epoch[3/15], Train Loss:232.1720, Test Loss:218.4600
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 43.49it/s]
Epoch[4/15], Train Loss:215.6465, Test Loss:208.3050
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 43.23it/s]
Epoch[5/15], Train Loss:206.6534, Test Loss:200.1827
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 42.93it/s]
Epoch[6/15], Train Loss:199.2504, Test Loss:193.1748
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 43.56it/s]
Epoch[7/15], Train Loss:193.8546, Test Loss:190.6244
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 43.13it/s]
Epoch[8/15], Train Loss:190.3395, Test Loss:184.4144
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 42.91it/s]
Epoch[9/15], Train Loss:186.9274, Test Loss:181.1888
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 43.25it/s]
Epoch[10/15], Train Loss:184.0670, Test Loss:181.3349
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 43.10it/s]
Epoch[11/15], Train Loss:181.4889, Test Loss:177.6448
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 43.42it/s]
Epoch[12/15], Train Loss:180.2908, Test Loss:175.7398
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 43.61it/s]
Epoch[13/15], Train Loss:178.6861, Test Loss:173.2231
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 43.32it/s]
Epoch[14/15], Train Loss:177.2070, Test Loss:173.3442
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 42.54it/s]
Epoch[15/15], Train Loss:176.0379, Test Loss:170.5740 Running Exp 4: ratio=0.25, bias=128
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 43.03it/s]
Epoch[1/15], Train Loss:281.5624, Test Loss:263.2846
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 42.59it/s]
Epoch[2/15], Train Loss:256.8004, Test Loss:242.9493
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 43.43it/s]
Epoch[3/15], Train Loss:232.2553, Test Loss:219.0204
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 43.36it/s]
Epoch[4/15], Train Loss:213.9537, Test Loss:202.6296
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 43.18it/s]
Epoch[5/15], Train Loss:204.5752, Test Loss:198.3849
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 43.96it/s]
Epoch[6/15], Train Loss:198.3398, Test Loss:193.0239
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 42.39it/s]
Epoch[7/15], Train Loss:194.2362, Test Loss:187.5654
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 43.75it/s]
Epoch[8/15], Train Loss:191.3654, Test Loss:186.3733
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 43.25it/s]
Epoch[9/15], Train Loss:190.1163, Test Loss:185.2710
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 43.44it/s]
Epoch[10/15], Train Loss:187.4197, Test Loss:184.7369
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 43.27it/s]
Epoch[11/15], Train Loss:185.6226, Test Loss:181.5178
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 43.38it/s]
Epoch[12/15], Train Loss:183.9997, Test Loss:178.7287
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 42.45it/s]
Epoch[13/15], Train Loss:182.8408, Test Loss:176.7361
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 42.65it/s]
Epoch[14/15], Train Loss:179.7695, Test Loss:174.8288
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 43.15it/s]
Epoch[15/15], Train Loss:179.3338, Test Loss:175.2575 Running Exp 5: ratio=0.42, bias=0
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 42.42it/s]
Epoch[1/15], Train Loss:277.0365, Test Loss:248.9602
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 43.69it/s]
Epoch[2/15], Train Loss:229.0852, Test Loss:204.9259
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 43.18it/s]
Epoch[3/15], Train Loss:196.3066, Test Loss:184.7824
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 43.34it/s]
Epoch[4/15], Train Loss:181.3148, Test Loss:171.4765
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 43.76it/s]
Epoch[5/15], Train Loss:172.8257, Test Loss:167.8591
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 43.66it/s]
Epoch[6/15], Train Loss:167.4741, Test Loss:160.3568
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 43.57it/s]
Epoch[7/15], Train Loss:163.6433, Test Loss:154.6211
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 43.21it/s]
Epoch[8/15], Train Loss:159.9462, Test Loss:155.4043
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 43.37it/s]
Epoch[9/15], Train Loss:157.5609, Test Loss:151.2020
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 43.39it/s]
Epoch[10/15], Train Loss:156.2889, Test Loss:149.0188
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 43.19it/s]
Epoch[11/15], Train Loss:154.3912, Test Loss:149.5503
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 43.79it/s]
Epoch[12/15], Train Loss:152.5283, Test Loss:146.1476
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 43.69it/s]
Epoch[13/15], Train Loss:150.7667, Test Loss:145.6809
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 43.78it/s]
Epoch[14/15], Train Loss:151.0697, Test Loss:146.3899
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 43.33it/s]
Epoch[15/15], Train Loss:148.3164, Test Loss:143.7830 Running Exp 6: ratio=0.42, bias=32
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 43.05it/s]
Epoch[1/15], Train Loss:283.3236, Test Loss:264.4868
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 43.55it/s]
Epoch[2/15], Train Loss:236.6771, Test Loss:216.6570
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 43.30it/s]
Epoch[3/15], Train Loss:203.3738, Test Loss:190.3916
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 43.26it/s]
Epoch[4/15], Train Loss:186.3018, Test Loss:178.2955
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 43.48it/s]
Epoch[5/15], Train Loss:178.0821, Test Loss:171.9847
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 43.02it/s]
Epoch[6/15], Train Loss:170.8841, Test Loss:167.4282
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 43.31it/s]
Epoch[7/15], Train Loss:167.9860, Test Loss:163.4340
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 43.04it/s]
Epoch[8/15], Train Loss:164.6695, Test Loss:161.2200
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:11<00:00, 51.94it/s]
Epoch[9/15], Train Loss:160.9252, Test Loss:156.6880
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 43.18it/s]
Epoch[10/15], Train Loss:159.4691, Test Loss:156.0856
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 43.36it/s]
Epoch[11/15], Train Loss:157.6812, Test Loss:153.6034
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 42.89it/s]
Epoch[12/15], Train Loss:155.5358, Test Loss:152.6922
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 42.59it/s]
Epoch[13/15], Train Loss:154.8015, Test Loss:152.4604
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 42.88it/s]
Epoch[14/15], Train Loss:152.9183, Test Loss:150.9840
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 42.51it/s]
Epoch[15/15], Train Loss:152.8156, Test Loss:155.1455 Running Exp 7: ratio=0.42, bias=64
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 42.46it/s]
Epoch[1/15], Train Loss:282.6076, Test Loss:258.4053
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 43.09it/s]
Epoch[2/15], Train Loss:235.3796, Test Loss:216.3170
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 42.95it/s]
Epoch[3/15], Train Loss:204.2299, Test Loss:193.9331
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 43.10it/s]
Epoch[4/15], Train Loss:187.6430, Test Loss:180.2789
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 43.54it/s]
Epoch[5/15], Train Loss:179.1342, Test Loss:178.3607
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 43.26it/s]
Epoch[6/15], Train Loss:173.1791, Test Loss:166.7317
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 43.27it/s]
Epoch[7/15], Train Loss:169.2916, Test Loss:166.2375
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 43.13it/s]
Epoch[8/15], Train Loss:166.6997, Test Loss:163.1749
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 42.79it/s]
Epoch[9/15], Train Loss:163.1462, Test Loss:159.8800
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 43.25it/s]
Epoch[10/15], Train Loss:161.7531, Test Loss:155.2308
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 42.76it/s]
Epoch[11/15], Train Loss:159.6967, Test Loss:153.2128
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 43.02it/s]
Epoch[12/15], Train Loss:157.4075, Test Loss:151.6287
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 43.09it/s]
Epoch[13/15], Train Loss:155.8322, Test Loss:153.4908
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 43.19it/s]
Epoch[14/15], Train Loss:155.0289, Test Loss:149.5961
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 42.96it/s]
Epoch[15/15], Train Loss:153.4852, Test Loss:149.4652 Running Exp 8: ratio=0.42, bias=128
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:12<00:00, 45.41it/s]
Epoch[1/15], Train Loss:285.1389, Test Loss:273.6799
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:12<00:00, 45.74it/s]
Epoch[2/15], Train Loss:250.6317, Test Loss:227.5979
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:12<00:00, 45.10it/s]
Epoch[3/15], Train Loss:215.8908, Test Loss:202.3353
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:12<00:00, 45.77it/s]
Epoch[4/15], Train Loss:199.1574, Test Loss:187.3448
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:12<00:00, 45.05it/s]
Epoch[5/15], Train Loss:189.5199, Test Loss:182.6071
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:12<00:00, 45.70it/s]
Epoch[6/15], Train Loss:182.0144, Test Loss:175.1253
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:12<00:00, 45.04it/s]
Epoch[7/15], Train Loss:178.1716, Test Loss:174.6061
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:12<00:00, 45.51it/s]
Epoch[8/15], Train Loss:173.5888, Test Loss:166.4977
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:12<00:00, 45.69it/s]
Epoch[9/15], Train Loss:170.6262, Test Loss:165.7048
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:12<00:00, 45.84it/s]
Epoch[10/15], Train Loss:168.7482, Test Loss:164.0337
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:12<00:00, 44.86it/s]
Epoch[11/15], Train Loss:166.5893, Test Loss:160.8959
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:12<00:00, 45.09it/s]
Epoch[12/15], Train Loss:164.5327, Test Loss:159.4502
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:12<00:00, 45.23it/s]
Epoch[13/15], Train Loss:162.8528, Test Loss:161.0181
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:12<00:00, 45.31it/s]
Epoch[14/15], Train Loss:162.1272, Test Loss:155.2010
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:12<00:00, 45.80it/s]
Epoch[15/15], Train Loss:161.8845, Test Loss:154.7546 Running Exp 9: ratio=0.58, bias=0
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 43.28it/s]
Epoch[1/15], Train Loss:276.1178, Test Loss:236.1619
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:12<00:00, 44.99it/s]
Epoch[2/15], Train Loss:211.7544, Test Loss:189.0962
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:12<00:00, 44.38it/s]
Epoch[3/15], Train Loss:180.5074, Test Loss:166.1278
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:12<00:00, 45.65it/s]
Epoch[4/15], Train Loss:164.0274, Test Loss:154.8013
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:12<00:00, 44.23it/s]
Epoch[5/15], Train Loss:155.9787, Test Loss:147.0392
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:12<00:00, 45.11it/s]
Epoch[6/15], Train Loss:150.4543, Test Loss:142.6933
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:12<00:00, 44.21it/s]
Epoch[7/15], Train Loss:145.9969, Test Loss:139.6051
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 42.98it/s]
Epoch[8/15], Train Loss:143.1562, Test Loss:139.2055
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:12<00:00, 45.93it/s]
Epoch[9/15], Train Loss:140.4109, Test Loss:133.6836
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:12<00:00, 44.85it/s]
Epoch[10/15], Train Loss:138.4189, Test Loss:131.8453
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 43.49it/s]
Epoch[11/15], Train Loss:136.5204, Test Loss:129.7945
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:12<00:00, 45.99it/s]
Epoch[12/15], Train Loss:136.5671, Test Loss:132.4676
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:12<00:00, 45.80it/s]
Epoch[13/15], Train Loss:134.0543, Test Loss:128.5069
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:12<00:00, 45.94it/s]
Epoch[14/15], Train Loss:132.9259, Test Loss:128.7415
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:12<00:00, 45.07it/s]
Epoch[15/15], Train Loss:132.5392, Test Loss:125.4043 Running Exp 10: ratio=0.58, bias=32
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 42.85it/s]
Epoch[1/15], Train Loss:286.1558, Test Loss:283.9790
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:12<00:00, 45.78it/s]
Epoch[2/15], Train Loss:261.5400, Test Loss:224.1544
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:12<00:00, 45.91it/s]
Epoch[3/15], Train Loss:194.9654, Test Loss:181.0201
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:12<00:00, 45.61it/s]
Epoch[4/15], Train Loss:174.0839, Test Loss:167.9352
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:12<00:00, 46.70it/s]
Epoch[5/15], Train Loss:163.1055, Test Loss:154.8540
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:12<00:00, 45.58it/s]
Epoch[6/15], Train Loss:157.3319, Test Loss:150.2334
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:12<00:00, 45.05it/s]
Epoch[7/15], Train Loss:152.3981, Test Loss:147.1227
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:12<00:00, 45.17it/s]
Epoch[8/15], Train Loss:149.5164, Test Loss:148.2921
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:12<00:00, 45.74it/s]
Epoch[9/15], Train Loss:146.9265, Test Loss:143.5351
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:13<00:00, 41.85it/s]
Epoch[10/15], Train Loss:143.4521, Test Loss:139.4308
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:12<00:00, 44.84it/s]
Epoch[11/15], Train Loss:142.3732, Test Loss:139.5186
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:14<00:00, 39.71it/s]
Epoch[12/15], Train Loss:140.3962, Test Loss:133.9285
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 31.17it/s]
Epoch[13/15], Train Loss:138.7451, Test Loss:135.2442
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.75it/s]
Epoch[14/15], Train Loss:138.2962, Test Loss:134.0785
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.42it/s]
Epoch[15/15], Train Loss:137.6410, Test Loss:134.1070 Running Exp 11: ratio=0.58, bias=64
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.50it/s]
Epoch[1/15], Train Loss:286.2199, Test Loss:283.8015
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 31.13it/s]
Epoch[2/15], Train Loss:263.8363, Test Loss:229.4499
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.74it/s]
Epoch[3/15], Train Loss:212.0950, Test Loss:193.3062
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.90it/s]
Epoch[4/15], Train Loss:187.4531, Test Loss:180.2970
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.37it/s]
Epoch[5/15], Train Loss:171.5119, Test Loss:163.6442
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.29it/s]
Epoch[6/15], Train Loss:162.7032, Test Loss:157.5588
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.41it/s]
Epoch[7/15], Train Loss:157.4095, Test Loss:151.9074
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.21it/s]
Epoch[8/15], Train Loss:153.4065, Test Loss:150.9182
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 30.01it/s]
Epoch[9/15], Train Loss:149.1631, Test Loss:143.2221
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 31.28it/s]
Epoch[10/15], Train Loss:147.2651, Test Loss:143.6302
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.95it/s]
Epoch[11/15], Train Loss:144.6846, Test Loss:144.4631
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 31.31it/s]
Epoch[12/15], Train Loss:143.8642, Test Loss:138.1613
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 31.48it/s]
Epoch[13/15], Train Loss:141.6553, Test Loss:139.6836
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.96it/s]
Epoch[14/15], Train Loss:141.6157, Test Loss:135.6981
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.88it/s]
Epoch[15/15], Train Loss:139.2405, Test Loss:134.2186 Running Exp 12: ratio=0.58, bias=128
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.09it/s]
Epoch[1/15], Train Loss:282.2087, Test Loss:257.6377
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.59it/s]
Epoch[2/15], Train Loss:231.5072, Test Loss:205.7457
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.84it/s]
Epoch[3/15], Train Loss:196.2753, Test Loss:181.2400
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.69it/s]
Epoch[4/15], Train Loss:179.6031, Test Loss:171.2407
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 31.46it/s]
Epoch[5/15], Train Loss:169.2366, Test Loss:164.4875
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.26it/s]
Epoch[6/15], Train Loss:163.9771, Test Loss:156.9209
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 31.13it/s]
Epoch[7/15], Train Loss:158.9209, Test Loss:152.2904
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.71it/s]
Epoch[8/15], Train Loss:155.5872, Test Loss:150.6064
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.16it/s]
Epoch[9/15], Train Loss:151.9472, Test Loss:145.4820
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.36it/s]
Epoch[10/15], Train Loss:149.8573, Test Loss:145.0581
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 31.31it/s]
Epoch[11/15], Train Loss:148.3998, Test Loss:143.3682
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.96it/s]
Epoch[12/15], Train Loss:147.5959, Test Loss:140.5172
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.82it/s]
Epoch[13/15], Train Loss:145.8100, Test Loss:142.0457
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 30.08it/s]
Epoch[14/15], Train Loss:144.5702, Test Loss:137.9952
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.39it/s]
Epoch[15/15], Train Loss:142.4231, Test Loss:140.9035 Running Exp 13: ratio=0.75, bias=0
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.22it/s]
Epoch[1/15], Train Loss:265.9617, Test Loss:215.8434
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 31.07it/s]
Epoch[2/15], Train Loss:187.5279, Test Loss:162.7467
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 31.05it/s]
Epoch[3/15], Train Loss:158.6252, Test Loss:149.3893
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 28.97it/s]
Epoch[4/15], Train Loss:145.5767, Test Loss:137.1275
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:20<00:00, 27.80it/s]
Epoch[5/15], Train Loss:139.1034, Test Loss:132.1765
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:20<00:00, 27.98it/s]
Epoch[6/15], Train Loss:133.7504, Test Loss:127.5196
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:21<00:00, 26.74it/s]
Epoch[7/15], Train Loss:130.6003, Test Loss:127.5223
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:20<00:00, 28.24it/s]
Epoch[8/15], Train Loss:127.9039, Test Loss:122.7382
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:21<00:00, 26.33it/s]
Epoch[9/15], Train Loss:124.4469, Test Loss:122.5740
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:20<00:00, 28.03it/s]
Epoch[10/15], Train Loss:124.3173, Test Loss:118.6418
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:21<00:00, 26.70it/s]
Epoch[11/15], Train Loss:123.0512, Test Loss:119.8673
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:20<00:00, 28.29it/s]
Epoch[12/15], Train Loss:120.7475, Test Loss:116.8347
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:21<00:00, 26.86it/s]
Epoch[13/15], Train Loss:120.2872, Test Loss:113.7680
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:21<00:00, 26.47it/s]
Epoch[14/15], Train Loss:118.1377, Test Loss:113.2712
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:21<00:00, 26.21it/s]
Epoch[15/15], Train Loss:116.7879, Test Loss:115.0822 Running Exp 14: ratio=0.75, bias=32
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:21<00:00, 26.87it/s]
Epoch[1/15], Train Loss:279.6169, Test Loss:247.8377
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:21<00:00, 26.88it/s]
Epoch[2/15], Train Loss:203.3650, Test Loss:175.7769
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:21<00:00, 26.34it/s]
Epoch[3/15], Train Loss:164.9750, Test Loss:157.2982
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:21<00:00, 26.68it/s]
Epoch[4/15], Train Loss:152.5269, Test Loss:145.0399
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:21<00:00, 26.64it/s]
Epoch[5/15], Train Loss:144.1912, Test Loss:140.1902
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:21<00:00, 26.74it/s]
Epoch[6/15], Train Loss:139.5601, Test Loss:139.4209
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:20<00:00, 27.48it/s]
Epoch[7/15], Train Loss:135.2978, Test Loss:133.3723
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:21<00:00, 26.70it/s]
Epoch[8/15], Train Loss:131.8095, Test Loss:130.0859
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:21<00:00, 26.41it/s]
Epoch[9/15], Train Loss:129.8850, Test Loss:125.5295
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:21<00:00, 26.68it/s]
Epoch[10/15], Train Loss:128.3161, Test Loss:123.5431
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:21<00:00, 26.54it/s]
Epoch[11/15], Train Loss:125.9413, Test Loss:124.1799
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:21<00:00, 27.16it/s]
Epoch[12/15], Train Loss:125.2987, Test Loss:120.6217
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 28.97it/s]
Epoch[13/15], Train Loss:123.7624, Test Loss:122.9981
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:21<00:00, 26.95it/s]
Epoch[14/15], Train Loss:122.7708, Test Loss:121.8475
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:21<00:00, 26.93it/s]
Epoch[15/15], Train Loss:121.5330, Test Loss:116.5920 Running Exp 15: ratio=0.75, bias=64
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:21<00:00, 26.38it/s]
Epoch[1/15], Train Loss:282.3106, Test Loss:251.0905
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:20<00:00, 27.67it/s]
Epoch[2/15], Train Loss:213.0863, Test Loss:180.5358
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 28.84it/s]
Epoch[3/15], Train Loss:172.3861, Test Loss:166.4396
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:21<00:00, 26.40it/s]
Epoch[4/15], Train Loss:156.5993, Test Loss:152.4034
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:20<00:00, 28.39it/s]
Epoch[5/15], Train Loss:149.0111, Test Loss:143.1128
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:20<00:00, 28.36it/s]
Epoch[6/15], Train Loss:142.8192, Test Loss:140.6232
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:20<00:00, 27.58it/s]
Epoch[7/15], Train Loss:138.8363, Test Loss:134.9869
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:20<00:00, 28.17it/s]
Epoch[8/15], Train Loss:135.2780, Test Loss:128.2047
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:21<00:00, 26.90it/s]
Epoch[9/15], Train Loss:133.2226, Test Loss:129.0277
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:20<00:00, 27.60it/s]
Epoch[10/15], Train Loss:131.0527, Test Loss:124.6489
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:20<00:00, 27.93it/s]
Epoch[11/15], Train Loss:130.4037, Test Loss:126.0701
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:21<00:00, 26.64it/s]
Epoch[12/15], Train Loss:127.1032, Test Loss:122.8239
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:21<00:00, 26.56it/s]
Epoch[13/15], Train Loss:127.1497, Test Loss:123.4502
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:20<00:00, 27.31it/s]
Epoch[14/15], Train Loss:125.7343, Test Loss:121.7185
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:21<00:00, 26.18it/s]
Epoch[15/15], Train Loss:124.1018, Test Loss:117.9798 Running Exp 16: ratio=0.75, bias=128
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:21<00:00, 26.23it/s]
Epoch[1/15], Train Loss:286.0056, Test Loss:281.0018
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:22<00:00, 25.35it/s]
Epoch[2/15], Train Loss:258.1966, Test Loss:224.7968
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:21<00:00, 26.71it/s]
Epoch[3/15], Train Loss:194.7078, Test Loss:173.6795
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:22<00:00, 25.88it/s]
Epoch[4/15], Train Loss:167.7055, Test Loss:157.9975
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:22<00:00, 25.31it/s]
Epoch[5/15], Train Loss:157.1517, Test Loss:152.2653
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:22<00:00, 25.76it/s]
Epoch[6/15], Train Loss:149.3709, Test Loss:143.7763
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:22<00:00, 25.34it/s]
Epoch[7/15], Train Loss:144.6033, Test Loss:141.1127
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:22<00:00, 25.77it/s]
Epoch[8/15], Train Loss:140.9103, Test Loss:136.2456
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:22<00:00, 25.46it/s]
Epoch[9/15], Train Loss:137.9824, Test Loss:136.2454
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:22<00:00, 25.58it/s]
Epoch[10/15], Train Loss:136.4014, Test Loss:129.3224
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:22<00:00, 25.10it/s]
Epoch[11/15], Train Loss:134.3030, Test Loss:128.2227
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:22<00:00, 25.75it/s]
Epoch[12/15], Train Loss:132.8176, Test Loss:125.9699
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:21<00:00, 26.72it/s]
Epoch[13/15], Train Loss:130.8622, Test Loss:123.4949
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:22<00:00, 25.00it/s]
Epoch[14/15], Train Loss:129.0023, Test Loss:122.3080
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:22<00:00, 25.85it/s]
Epoch[15/15], Train Loss:128.5247, Test Loss:122.0665
# Reload the saved Exp-3.2 sweep results: a list of per-experiment dicts holding
# the hyperparameters ('ratio', 'bias'), the train/test loss curves, and a
# 'model_state_dict' of tensors.
# `weights_only=True` restricts unpickling to tensors and plain containers,
# which is all this checkpoint contains — it fixes the FutureWarning emitted
# below and avoids the arbitrary-code-execution risk of full pickle loading.
exp3_2_loaded = torch.load("./models/exp3_2_17302004056334803.pth", weights_only=True)
exp3_2_loaded
# exp3_2_results = get_experiment_results(exp3_2_loaded, test_hyperparam_names=["ratio", "bias"], extra_loader=exp3_extra_loader)
D:\Temps\temp\ipykernel_91216\193238723.py:1: FutureWarning: You are using `torch.load` with `weights_only=False` (the current default value), which uses the default pickle module implicitly. It is possible to construct malicious pickle data which will execute arbitrary code during unpickling (See https://github.com/pytorch/pytorch/blob/main/SECURITY.md#untrusted-models for more details). In a future release, the default value for `weights_only` will be flipped to `True`. This limits the functions that could be executed during unpickling. Arbitrary objects will no longer be allowed to be loaded via this mode unless they are explicitly allowlisted by the user via `torch.serialization.add_safe_globals`. We recommend you start setting `weights_only=True` for any use case where you don't have full control of the loaded file. Please open an issue on GitHub for any issues related to this experimental feature.
exp3_2_loaded = torch.load("./models/exp3_2_17302004056334803.pth")
[{'ratio': 0.25,
'bias': 0,
'train_losses': [285.60101888067436,
263.1288438009758,
227.20843028959803,
208.58533259586514,
199.0652768724252,
192.60961242600052,
187.62277014550946,
184.79933610886178,
181.98672103465748,
180.13460894030428,
177.51193681000416,
175.3553095402726,
174.15974689944667,
172.78727854130364,
171.80259385200486],
'test_losses': [279.38514354182223,
239.49537703570198,
217.060605208079,
198.57451341666427,
191.19259344362746,
189.26621292151657,
181.03300117043887,
178.29486753426346,
178.42596708559523,
173.9601492507785,
173.50485614701813,
170.50235975489898,
169.74062514772604,
167.5622280999726,
168.54016506905648],
'model_state_dict': OrderedDict([('conv_layers.0.weight',
tensor([[[[-0.1523, -0.0773, -0.1129],
[-0.2663, 0.1314, -0.0533],
[ 0.1193, 0.2347, 0.1989]],
[[-0.0536, -0.1561, -0.1747],
[ 0.1127, 0.0797, -0.0535],
[ 0.1120, 0.3193, -0.0161]],
[[-0.0247, -0.1091, -0.1827],
[-0.2097, 0.1656, -0.1122],
[ 0.0194, 0.0614, 0.2655]]],
[[[ 0.3277, 0.1230, -0.2185],
[-0.0154, -0.1962, 0.0602],
[ 0.0713, -0.1752, 0.0303]],
[[ 0.2652, 0.0770, -0.2367],
[ 0.0680, -0.0019, -0.0602],
[ 0.2056, -0.1793, -0.2694]],
[[ 0.1172, 0.0250, -0.1064],
[ 0.1801, -0.0639, -0.0893],
[ 0.0472, -0.0333, 0.0848]]],
[[[ 0.0949, 0.2757, 0.0202],
[ 0.0330, 0.0178, -0.0251],
[-0.1647, -0.1729, -0.1270]],
[[ 0.0774, 0.0433, 0.1939],
[ 0.1827, 0.0299, 0.0470],
[-0.2656, -0.1621, -0.1767]],
[[ 0.0956, 0.0394, 0.2411],
[-0.1004, -0.0902, 0.0946],
[ 0.0721, 0.0234, -0.2985]]],
[[[-0.1541, -0.0787, -0.2243],
[ 0.1071, -0.1443, -0.0635],
[ 0.3145, -0.0667, 0.2359]],
[[-0.0971, -0.0966, -0.2268],
[-0.0179, 0.0389, -0.2239],
[ 0.1591, 0.2411, 0.1385]],
[[ 0.0799, 0.0823, -0.0626],
[ 0.1028, -0.0625, -0.1571],
[ 0.0919, 0.1127, -0.0068]]],
[[[ 0.0088, 0.1103, -0.1297],
[ 0.1055, -0.0917, -0.1807],
[-0.1220, 0.1195, 0.1939]],
[[ 0.1591, -0.0074, -0.0441],
[-0.1843, -0.2587, -0.0953],
[-0.0346, -0.0670, 0.0437]],
[[ 0.1174, 0.0307, 0.1369],
[ 0.0595, 0.0052, -0.0693],
[-0.1894, 0.0671, -0.1880]]],
[[[ 0.0188, 0.0650, -0.1495],
[ 0.1468, 0.2043, -0.2921],
[ 0.1070, -0.0283, -0.0285]],
[[ 0.1268, 0.0767, -0.2025],
[ 0.1501, -0.1676, 0.0042],
[ 0.2777, 0.0504, -0.1909]],
[[-0.1776, 0.0966, -0.2976],
[ 0.1752, 0.0795, 0.0257],
[ 0.1410, 0.0879, -0.2465]]],
[[[ 0.0377, 0.0185, -0.0334],
[-0.1330, -0.0027, -0.1204],
[ 0.0908, 0.1117, -0.0289]],
[[-0.3226, -0.2324, 0.0939],
[-0.0787, -0.2005, 0.0371],
[ 0.0884, 0.0173, -0.0998]],
[[ 0.0282, -0.1353, 0.2034],
[-0.1141, 0.0665, 0.1155],
[ 0.0825, 0.2154, 0.1297]]],
[[[ 0.1175, 0.2004, 0.0609],
[ 0.0233, 0.0301, 0.0506],
[ 0.0100, -0.1515, -0.2276]],
[[ 0.3286, -0.0232, -0.0705],
[-0.0622, -0.1094, 0.0998],
[ 0.0425, -0.1187, 0.0606]],
[[ 0.1025, -0.0736, 0.0303],
[ 0.1361, -0.0239, -0.0987],
[-0.0500, -0.0485, -0.2305]]]], device='cuda:0')),
('conv_layers.0.bias',
tensor([ 0.2275, 0.1235, 0.1745, 0.2452, -0.0546, 0.2130, -0.3062, 0.2186],
device='cuda:0')),
('conv_layers.2.weight',
tensor([[[[-0.0620, -0.1182, 0.0978],
[-0.0866, -0.0096, 0.0034],
[-0.0678, -0.1468, -0.0922]],
[[-0.0551, -0.0926, -0.0847],
[ 0.0156, 0.1055, -0.0151],
[ 0.0709, -0.1050, -0.0483]],
[[-0.1333, -0.0867, 0.0725],
[ 0.0447, -0.1031, -0.0592],
[ 0.0698, -0.0762, -0.1677]],
...,
[[-0.0866, 0.0030, 0.0082],
[-0.1262, -0.0993, 0.0552],
[-0.1184, -0.1057, -0.0337]],
[[-0.0585, 0.0348, -0.0963],
[-0.0393, 0.0843, -0.0527],
[ 0.0481, -0.1376, -0.0291]],
[[ 0.0240, 0.0395, -0.0063],
[-0.1284, -0.1333, 0.0151],
[ 0.0454, 0.0034, 0.0443]]],
[[[ 0.0531, -0.0526, -0.0537],
[-0.0647, -0.0128, -0.0721],
[ 0.1630, -0.0359, 0.0812]],
[[ 0.0149, 0.2055, 0.1578],
[ 0.0497, 0.1754, 0.0323],
[ 0.0703, -0.0536, 0.0590]],
[[-0.0620, -0.0210, 0.0264],
[-0.1904, -0.1783, -0.0548],
[ 0.0673, -0.0651, 0.0041]],
...,
[[ 0.1631, 0.0059, 0.1200],
[ 0.1638, 0.0462, -0.0726],
[ 0.0534, 0.0727, -0.0588]],
[[-0.0232, -0.0012, -0.0633],
[-0.0790, -0.0332, 0.0123],
[-0.1287, -0.0758, 0.1302]],
[[-0.0058, 0.1056, -0.1245],
[ 0.1252, 0.0183, -0.0710],
[ 0.0661, 0.0654, -0.0861]]],
[[[-0.2948, -0.0618, 0.1557],
[-0.0112, 0.0559, 0.2870],
[-0.3797, 0.0139, -0.0121]],
[[-0.0912, 0.0721, -0.1241],
[ 0.1783, 0.1585, -0.1194],
[-0.2620, 0.1524, -0.1744]],
[[-0.1084, -0.1480, -0.1109],
[ 0.1240, -0.0301, -0.0901],
[ 0.0327, 0.0604, 0.0452]],
...,
[[-0.1587, -0.1341, 0.0158],
[-0.0132, 0.1977, 0.1563],
[-0.4267, 0.0588, -0.1386]],
[[-0.0119, -0.1043, -0.0680],
[-0.0032, -0.1911, -0.1774],
[ 0.2749, -0.0752, -0.2279]],
[[ 0.0608, 0.1326, -0.1098],
[ 0.0197, 0.1264, 0.0327],
[-0.1633, 0.1057, 0.0215]]],
...,
[[[ 0.0368, 0.0533, -0.1047],
[ 0.0444, -0.0294, -0.1479],
[-0.0259, -0.0150, -0.0891]],
[[-0.2848, -0.0623, -0.0020],
[-0.0533, 0.1008, 0.0540],
[ 0.0016, 0.1140, 0.1449]],
[[ 0.0886, 0.0865, 0.1307],
[-0.0143, 0.0884, 0.1310],
[ 0.1458, 0.0720, 0.1264]],
...,
[[-0.0722, -0.0384, 0.1474],
[-0.2310, -0.1018, 0.0049],
[-0.0676, -0.1545, -0.0324]],
[[-0.0358, -0.1312, -0.1859],
[ 0.0976, -0.1062, -0.1038],
[ 0.0914, -0.0124, 0.0264]],
[[-0.1615, 0.0458, 0.2021],
[ 0.0081, -0.0857, 0.0783],
[ 0.0428, 0.0338, 0.0381]]],
[[[-0.1266, -0.2603, -0.0201],
[-0.1353, -0.1716, 0.1380],
[ 0.1663, 0.1114, 0.0237]],
[[ 0.0309, -0.1293, -0.2160],
[ 0.0714, -0.1926, -0.1856],
[-0.0640, -0.0264, -0.0484]],
[[ 0.2844, 0.2588, 0.1256],
[-0.0647, 0.1137, 0.1321],
[-0.0820, -0.2165, -0.0604]],
...,
[[-0.1031, -0.1013, -0.0284],
[-0.0931, -0.0458, -0.1065],
[-0.0058, 0.0107, -0.2370]],
[[ 0.0833, -0.0793, -0.0898],
[-0.0011, -0.0291, 0.0617],
[ 0.1020, 0.1549, 0.0358]],
[[ 0.1058, -0.1021, -0.1506],
[-0.0538, 0.0139, 0.0437],
[-0.0522, -0.1077, -0.0077]]],
[[[ 0.0400, -0.0473, -0.1268],
[-0.0873, 0.0834, 0.0077],
[ 0.0718, 0.0971, 0.0163]],
[[ 0.0205, -0.0344, 0.0285],
[ 0.0214, -0.1185, 0.1543],
[-0.0874, -0.0622, -0.0364]],
[[-0.3007, -0.3178, -0.2019],
[-0.1522, -0.3547, -0.3487],
[ 0.1860, 0.1410, -0.0062]],
...,
[[-0.0503, 0.0448, -0.0063],
[-0.0968, 0.0034, 0.0225],
[ 0.1004, 0.0598, -0.0272]],
[[-0.0394, -0.1373, 0.0507],
[-0.1305, -0.1111, -0.2364],
[ 0.0924, -0.0820, -0.1255]],
[[ 0.1351, -0.0298, -0.0882],
[-0.0971, -0.0277, -0.2299],
[ 0.0518, -0.0865, -0.0005]]]], device='cuda:0')),
('conv_layers.2.bias',
tensor([-0.1149, -0.2141, 0.2413, -0.0924, -0.0603, -0.3886, 0.0706, -0.1049,
-0.1021, 0.0976, 0.2197, 0.4431, 0.0456, -0.2886, -0.0043, 0.2041],
device='cuda:0')),
('conv_layers.5.weight',
tensor([[[[-0.0325, -0.0028, -0.0241],
[ 0.0366, 0.0069, 0.0614],
[ 0.0996, -0.0383, 0.0078]],
[[ 0.0886, -0.0404, 0.1289],
[ 0.0372, 0.0701, 0.0990],
[-0.0265, 0.0278, 0.1620]],
[[ 0.1394, 0.1503, -0.0332],
[-0.0071, -0.0684, -0.3050],
[ 0.1016, 0.1918, 0.0435]],
...,
[[ 0.3824, 0.0123, -0.0073],
[-0.0401, -0.1085, -0.0083],
[-0.1506, -0.1128, 0.0526]],
[[-0.1370, 0.0116, -0.1983],
[-0.1676, -0.0999, -0.0138],
[-0.0937, -0.0991, 0.0911]],
[[ 0.0591, -0.0100, 0.0550],
[ 0.1068, 0.1116, -0.0579],
[ 0.0645, 0.2001, -0.0133]]],
[[[ 0.0166, 0.0461, -0.0505],
[-0.0443, -0.0260, -0.0189],
[ 0.0351, 0.0281, 0.0605]],
[[-0.0907, 0.0165, 0.0238],
[ 0.0455, -0.0054, -0.0322],
[-0.0343, -0.0466, -0.0190]],
[[-0.0702, -0.0585, 0.0180],
[-0.0718, -0.1076, -0.1010],
[ 0.0093, -0.0628, -0.0775]],
...,
[[ 0.0317, -0.0573, -0.0370],
[ 0.0198, -0.0051, 0.0404],
[ 0.0331, -0.1162, 0.0191]],
[[ 0.0240, 0.0318, -0.0707],
[ 0.0619, -0.0069, 0.0539],
[-0.0822, 0.0311, 0.0430]],
[[-0.0038, -0.0178, 0.0116],
[ 0.0412, -0.0458, -0.0588],
[ 0.0359, 0.0427, -0.0890]]],
[[[ 0.0300, 0.0294, -0.0736],
[ 0.0156, 0.0747, -0.0483],
[-0.0097, -0.0137, 0.0304]],
[[ 0.0421, -0.2128, -0.0819],
[ 0.0516, -0.1340, -0.0134],
[ 0.0782, 0.0793, 0.1028]],
[[ 0.1058, 0.2718, 0.0932],
[ 0.0787, 0.3077, 0.1951],
[-0.1103, -0.0378, -0.2109]],
...,
[[-0.1820, -0.1675, -0.0804],
[-0.1062, -0.0467, -0.1129],
[-0.0306, -0.1130, -0.0821]],
[[ 0.0688, -0.0984, -0.0152],
[ 0.0588, 0.0884, 0.0216],
[-0.0935, -0.0662, -0.1653]],
[[-0.0873, -0.0251, -0.1141],
[ 0.0081, -0.0795, 0.1038],
[ 0.0211, 0.0032, 0.0841]]],
...,
[[[ 0.0613, -0.0488, 0.0241],
[ 0.0409, 0.0833, 0.0407],
[-0.0753, 0.0685, 0.0429]],
[[-0.0988, -0.0465, -0.1394],
[ 0.0367, 0.0432, -0.0917],
[-0.1583, -0.0166, -0.1195]],
[[-0.3096, -0.1492, -0.3836],
[ 0.0084, 0.2377, 0.0267],
[-0.0557, 0.0941, -0.1131]],
...,
[[ 0.0751, 0.0750, -0.1014],
[ 0.0969, 0.0852, -0.0560],
[ 0.0796, 0.1299, 0.0184]],
[[-0.1766, -0.2031, -0.2026],
[-0.0197, 0.0531, 0.0390],
[ 0.0712, 0.1893, -0.0487]],
[[-0.0403, 0.0031, -0.1264],
[ 0.1276, 0.2123, 0.1120],
[ 0.0622, 0.0779, -0.0498]]],
[[[ 0.0070, -0.0718, 0.0293],
[-0.0840, -0.0723, 0.0687],
[-0.0510, 0.0560, 0.0107]],
[[ 0.0720, -0.1333, -0.0879],
[ 0.0854, -0.0329, 0.0827],
[ 0.0662, -0.0986, -0.0078]],
[[-0.0678, 0.1677, -0.0685],
[-0.1904, 0.1986, 0.0049],
[-0.1344, 0.1621, -0.0782]],
...,
[[-0.0847, -0.0239, -0.0035],
[-0.1917, -0.1198, -0.0470],
[-0.0932, 0.0813, 0.1540]],
[[ 0.0432, 0.0310, -0.0801],
[-0.0761, -0.0406, -0.2036],
[ 0.0368, 0.1589, 0.0334]],
[[-0.0587, 0.0486, -0.0553],
[-0.1535, 0.1623, 0.1503],
[-0.2253, 0.1490, 0.2127]]],
[[[ 0.0219, -0.0703, -0.0722],
[-0.0686, -0.0978, -0.0575],
[ 0.0758, 0.0591, 0.0258]],
[[ 0.1017, 0.0086, -0.0928],
[-0.0078, -0.0877, -0.1133],
[-0.1187, 0.0390, 0.0427]],
[[-0.0650, -0.0480, -0.0121],
[-0.1027, 0.0808, 0.1016],
[ 0.0319, -0.0092, 0.0996]],
...,
[[-0.0615, -0.1215, -0.2080],
[-0.1053, -0.0559, -0.0303],
[-0.2299, -0.1294, -0.1091]],
[[-0.0122, -0.0628, 0.0094],
[-0.0478, -0.1228, 0.0127],
[ 0.0567, 0.0763, 0.0761]],
[[ 0.0241, 0.0190, 0.1096],
[-0.0824, 0.0593, 0.0552],
[-0.1277, -0.1938, -0.0822]]]], device='cuda:0')),
('conv_layers.5.bias',
tensor([ 0.2661, -0.0829, 0.2653, 0.1021, 0.0145, -0.0844, -0.0671, 0.0590,
-0.2695, 0.1640, -0.1131, 0.0048, 0.0494, -0.0116, -0.0065, -0.1071,
0.3102, 0.1289, -0.0744, 0.0406, 0.0499, 0.0691, 0.2331, 0.1059,
0.2002, 0.0814, 0.0600, 0.1339, -0.0126, 0.2261, 0.0820, 0.0792],
device='cuda:0')),
('conv_layers.7.weight',
tensor([[[[-0.1044, 0.0135, 0.0648],
[-0.2170, -0.2544, -0.0492],
[-0.0417, -0.4206, -0.1036]],
[[-0.0595, -0.0302, -0.0152],
[-0.0162, -0.0791, -0.0668],
[-0.0120, -0.0548, -0.0913]],
[[-0.0324, -0.1203, -0.1256],
[-0.0112, 0.0055, -0.0111],
[ 0.1346, 0.1667, 0.0620]],
...,
[[-0.0009, -0.0119, -0.0594],
[ 0.0899, 0.0759, -0.0010],
[ 0.0069, 0.1543, 0.0843]],
[[ 0.0922, -0.2403, -0.0454],
[-0.0572, -0.1785, -0.1189],
[ 0.1228, -0.0617, 0.0393]],
[[ 0.0961, -0.0624, 0.0155],
[ 0.2000, 0.1006, 0.0365],
[ 0.2301, 0.0230, 0.0032]]],
[[[-0.0529, -0.0540, -0.0986],
[-0.0171, -0.0623, -0.0522],
[-0.0469, -0.0296, -0.0389]],
[[ 0.0468, 0.0536, -0.0088],
[-0.0551, -0.0486, 0.0276],
[-0.0160, 0.0052, -0.0272]],
[[-0.0321, -0.0150, -0.0295],
[-0.0033, -0.0490, -0.0789],
[-0.0559, -0.0469, -0.0119]],
...,
[[ 0.0198, -0.0530, -0.0669],
[ 0.0147, -0.0393, 0.0134],
[ 0.0254, -0.0084, -0.0581]],
[[-0.0036, -0.0253, 0.0053],
[-0.0578, 0.0425, 0.0056],
[ 0.0166, 0.0369, 0.0161]],
[[-0.0796, -0.0743, -0.0532],
[-0.0643, -0.0909, -0.0356],
[-0.0364, -0.0500, -0.0241]]],
[[[-0.0164, -0.0702, -0.0414],
[ 0.1292, -0.0409, 0.0269],
[ 0.0966, 0.0329, -0.0271]],
[[ 0.0207, -0.0398, 0.0558],
[-0.0300, -0.0512, 0.1037],
[-0.0450, 0.0153, 0.0417]],
[[-0.0215, 0.0039, 0.1180],
[-0.1032, 0.1367, 0.1297],
[-0.1951, 0.0656, 0.1305]],
...,
[[-0.0539, 0.0277, 0.1282],
[-0.1596, -0.0307, 0.0775],
[-0.2690, -0.1529, -0.0681]],
[[-0.0647, 0.0125, -0.0465],
[-0.0917, 0.0574, -0.1275],
[-0.1370, 0.1011, -0.0986]],
[[-0.2183, -0.0499, 0.0782],
[-0.1409, 0.0008, 0.0312],
[-0.0418, 0.0101, -0.0207]]],
...,
[[[ 0.0156, 0.1028, 0.1197],
[-0.0453, -0.0977, 0.0438],
[ 0.0078, -0.0952, -0.1755]],
[[ 0.0354, 0.0153, -0.0234],
[-0.0472, -0.0030, -0.0549],
[ 0.0392, 0.0383, 0.0618]],
[[-0.0727, -0.1705, -0.2092],
[-0.1636, -0.0826, -0.1187],
[ 0.0822, 0.2257, 0.1818]],
...,
[[ 0.0473, -0.0188, 0.0460],
[-0.0316, -0.0361, -0.0020],
[ 0.0694, 0.0054, 0.1502]],
[[ 0.1835, 0.1227, 0.0980],
[ 0.0985, -0.0682, -0.0956],
[ 0.0055, -0.2075, -0.2732]],
[[-0.1787, -0.1806, -0.0479],
[ 0.0867, 0.2297, 0.2666],
[-0.0556, 0.1080, 0.1119]]],
[[[ 0.0273, -0.0914, -0.0664],
[-0.0892, -0.0679, -0.0665],
[-0.0478, -0.0736, -0.0950]],
[[-0.0204, -0.0465, 0.0530],
[-0.0014, 0.0264, 0.0036],
[-0.0532, -0.0574, -0.0607]],
[[ 0.0271, 0.0052, 0.0081],
[-0.0909, -0.1072, -0.1269],
[-0.0229, -0.0465, -0.0353]],
...,
[[-0.0282, -0.1465, 0.0287],
[-0.0057, -0.0638, -0.1174],
[-0.0058, -0.0444, -0.0304]],
[[ 0.0180, -0.0577, -0.0063],
[-0.0631, -0.0019, 0.0549],
[-0.0275, -0.0076, -0.0268]],
[[-0.0686, -0.0996, -0.0948],
[ 0.0666, 0.0792, 0.0593],
[-0.0649, -0.1191, -0.0952]]],
[[[ 0.0902, 0.1045, 0.1187],
[-0.0232, 0.0865, 0.0782],
[-0.0833, -0.0357, -0.0147]],
[[-0.0202, -0.0058, 0.0956],
[-0.0289, 0.0097, 0.0437],
[ 0.0508, -0.0319, -0.0109]],
[[-0.0768, -0.0530, 0.0310],
[-0.1209, -0.2261, -0.0537],
[-0.0842, -0.0379, -0.0368]],
...,
[[-0.1073, -0.0634, -0.1424],
[-0.1791, -0.0778, -0.0980],
[ 0.0189, 0.0226, 0.0271]],
[[ 0.0651, -0.1226, 0.1632],
[ 0.0779, 0.0357, -0.0529],
[ 0.1461, 0.1518, -0.3584]],
[[-0.1749, -0.1744, -0.0146],
[ 0.0454, 0.0417, 0.0765],
[ 0.1629, 0.0601, 0.0379]]]], device='cuda:0')),
('conv_layers.7.bias',
tensor([ 0.0131, -0.0771, 0.1334, -0.0214, -0.0691, -0.0571, -0.0660, -0.0781,
-0.1219, -0.0359, 0.0674, -0.0868, -0.0855, -0.0508, 0.0697, 0.1287,
0.1087, -0.0540, -0.0853, 0.1101, 0.1470, -0.0783, 0.0681, -0.0280,
0.0571, 0.0015, -0.0728, -0.0550, -0.0586, 0.0891, -0.1075, -0.0392],
device='cuda:0')),
('conv_layers.10.weight',
tensor([[[[-0.1552, -0.0519, -0.0509],
[-0.2575, -0.0810, 0.1159],
[-0.2358, -0.0898, 0.0839]],
[[-0.0499, 0.0330, -0.0267],
[ 0.0155, 0.0495, -0.0451],
[ 0.0265, 0.0516, 0.0403]],
[[ 0.1153, 0.0176, 0.0979],
[ 0.0819, -0.0348, 0.0023],
[ 0.0907, -0.0961, -0.0966]],
...,
[[ 0.0523, 0.1239, 0.0691],
[-0.0149, 0.0935, 0.0589],
[ 0.1399, 0.0400, 0.0630]],
[[ 0.0189, 0.1003, 0.0175],
[ 0.1395, -0.0702, -0.0588],
[ 0.0961, 0.0320, 0.0393]],
[[-0.1969, -0.1163, 0.0560],
[-0.3525, -0.3557, 0.0957],
[-0.2943, -0.1423, 0.0573]]],
[[[-0.1047, 0.2020, 0.1547],
[-0.0406, 0.1973, 0.0111],
[ 0.0760, 0.0195, -0.1707]],
[[ 0.0742, 0.0638, -0.0054],
[ 0.0298, 0.0207, 0.0497],
[ 0.0011, -0.0052, 0.0348]],
[[-0.1366, 0.1355, 0.1824],
[-0.0922, 0.0394, 0.0316],
[-0.0483, -0.0064, -0.1199]],
...,
[[-0.0343, 0.0048, -0.0869],
[ 0.0771, 0.0552, -0.0599],
[ 0.0481, 0.0081, -0.0420]],
[[ 0.0156, -0.1075, -0.1086],
[-0.0093, -0.1423, -0.0749],
[ 0.0462, 0.1030, 0.0200]],
[[-0.0623, 0.0549, 0.1783],
[-0.1970, 0.0667, 0.1115],
[-0.1500, -0.0631, -0.1099]]],
[[[-0.1458, -0.1944, -0.1346],
[-0.1510, 0.0865, 0.1404],
[ 0.0522, 0.1724, 0.0425]],
[[ 0.0685, 0.0396, -0.0446],
[ 0.0582, 0.0217, 0.0348],
[ 0.0588, 0.1138, -0.0178]],
[[-0.0377, -0.1435, 0.1307],
[-0.0249, -0.0944, -0.0763],
[ 0.0241, 0.0146, 0.0773]],
...,
[[-0.1642, -0.0183, -0.0212],
[ 0.0616, 0.3129, 0.1863],
[ 0.2512, 0.3533, 0.1329]],
[[ 0.0356, -0.0031, 0.0300],
[ 0.0434, -0.0480, 0.0655],
[ 0.0164, -0.0410, -0.0494]],
[[-0.2098, -0.1373, -0.1250],
[-0.1245, -0.0452, 0.0459],
[ 0.1404, 0.1949, 0.1373]]],
...,
[[[-0.0130, -0.1314, 0.0587],
[-0.0759, -0.1471, 0.0269],
[ 0.0036, -0.0081, 0.0998]],
[[ 0.0266, 0.0258, 0.0261],
[ 0.0091, 0.0416, -0.0546],
[-0.0172, 0.0032, -0.0117]],
[[ 0.0440, 0.0710, 0.1401],
[-0.0312, -0.0013, 0.0179],
[-0.1003, -0.0841, -0.0385]],
...,
[[ 0.0260, 0.0112, 0.0129],
[ 0.0025, -0.0335, 0.1777],
[-0.1520, -0.0342, 0.0677]],
[[ 0.0499, 0.0323, 0.0285],
[-0.0054, 0.0947, 0.0419],
[-0.0863, -0.0549, -0.0300]],
[[ 0.0192, -0.0432, 0.0021],
[ 0.0339, -0.1308, -0.0451],
[-0.2211, -0.0120, 0.1216]]],
[[[-0.1075, -0.4029, 0.1284],
[-0.1957, -0.5555, -0.0705],
[ 0.0092, -0.1629, -0.0072]],
[[-0.0227, -0.1098, 0.0024],
[-0.0137, -0.0496, -0.0092],
[-0.0154, -0.0585, -0.0435]],
[[-0.0103, 0.0250, -0.0181],
[ 0.0461, 0.0721, -0.0381],
[-0.0210, 0.1491, -0.1583]],
...,
[[ 0.0343, -0.1281, 0.2821],
[-0.2338, -0.2356, 0.1578],
[-0.1810, -0.1982, 0.0497]],
[[ 0.0046, 0.0130, -0.0333],
[-0.0506, -0.0307, -0.0511],
[ 0.1405, 0.0610, -0.0745]],
[[-0.1059, -0.2327, 0.1296],
[-0.2746, -0.3250, 0.2112],
[-0.4631, -0.2064, 0.0431]]],
[[[-0.1013, -0.2022, -0.1037],
[-0.0657, -0.1464, 0.0832],
[-0.1211, 0.0648, 0.1268]],
[[-0.0857, 0.0066, -0.0571],
[-0.0115, -0.0022, -0.0439],
[-0.0967, -0.0338, -0.0573]],
[[-0.0014, 0.0211, -0.1271],
[ 0.0855, -0.0294, -0.1167],
[ 0.0610, 0.0291, -0.2276]],
...,
[[-0.0413, -0.1721, 0.0187],
[-0.0820, -0.0392, 0.1160],
[ 0.0828, 0.1037, -0.0739]],
[[-0.0480, 0.0897, 0.0830],
[ 0.0411, 0.0210, -0.0054],
[-0.0235, -0.1009, -0.0009]],
[[ 0.0732, -0.0890, -0.0038],
[ 0.0572, -0.1282, -0.1382],
[ 0.0173, -0.1957, -0.1181]]]], device='cuda:0')),
('conv_layers.10.bias',
tensor([ 0.1735, -0.1279, -0.0011, -0.0663, 0.0813, 0.0879, -0.1363, 0.1467,
0.0452, 0.0680, -0.0377, -0.1408, 0.1032, 0.0723, 0.1053, -0.1511,
0.0950, -0.0542, -0.0770, -0.1164, -0.1639, 0.0315, 0.0679, -0.1166,
0.0393, 0.0891, -0.0547, 0.0517, 0.0425, 0.2776, 0.1354, 0.0227],
device='cuda:0')),
('conv_layers.12.weight',
tensor([[[[-4.1576e-02, -5.8118e-02, -5.8533e-02],
[ 1.3255e-01, 9.9924e-02, -3.6299e-02],
[ 1.4689e-01, 6.6801e-02, 4.5159e-02]],
[[-1.9791e-01, -2.7900e-02, -1.0777e-01],
[-9.1936e-02, 5.3626e-02, -4.0800e-02],
[ 3.3428e-02, 5.2492e-02, 1.6018e-01]],
[[-4.2308e-02, -2.7832e-01, -3.3056e-01],
[-1.5255e-01, -2.7428e-01, -3.5702e-02],
[-4.9653e-02, -7.4538e-02, 2.0130e-02]],
...,
[[ 1.0365e-01, 9.1555e-02, -9.2937e-02],
[ 1.2009e-01, 7.3179e-02, -8.1943e-02],
[ 1.2222e-01, 3.7073e-02, -1.0016e-01]],
[[ 1.5448e-02, 1.4590e-02, -1.6916e-01],
[-2.7834e-02, 1.9011e-02, -6.2519e-02],
[-7.0399e-02, 4.2633e-02, 2.6001e-02]],
[[-4.6408e-02, -1.1521e-01, -5.4578e-02],
[-2.9376e-02, -6.9955e-02, -3.2514e-02],
[ 5.2901e-02, 3.2302e-02, 4.3197e-02]]],
[[[ 1.4507e-02, -4.6909e-03, -1.2982e-01],
[ 6.9797e-02, -1.6065e-01, -1.4398e-02],
[ 8.2945e-02, 1.1693e-01, -1.4507e-01]],
[[-1.5014e-01, 1.5154e-02, 6.1920e-03],
[ 4.6257e-02, -4.6465e-02, -1.3681e-01],
[ 8.4965e-02, 3.3997e-02, -1.1007e-01]],
[[ 6.7586e-02, 1.0124e-01, 1.8698e-01],
[-5.0019e-02, 8.3855e-02, 1.5431e-01],
[-2.3627e-02, 5.9497e-02, -1.0719e-01]],
...,
[[-3.4177e-01, -7.9046e-02, -8.2522e-02],
[-2.3318e-01, 6.0564e-02, 4.6653e-02],
[-1.4331e-01, 9.2372e-02, 4.5264e-02]],
[[-9.9714e-02, 2.0724e-01, 1.0366e-01],
[-3.3388e-01, -1.3017e-01, 7.8101e-02],
[-2.8983e-01, -2.8318e-01, 1.2984e-01]],
[[-1.3519e-01, -1.1928e-01, -1.0344e-01],
[-1.9977e-01, -2.2249e-01, -2.5948e-01],
[-6.4007e-01, -2.1064e-01, 1.7739e-02]]],
[[[-3.3940e-02, -5.4895e-03, -4.0379e-02],
[-3.3365e-02, -4.7593e-02, -1.1789e-01],
[ 3.4635e-02, -7.2403e-02, -3.8589e-02]],
[[-5.2574e-02, -4.8961e-02, -1.2727e-03],
[-2.4146e-02, -7.8158e-02, -9.4436e-02],
[-1.1634e-03, -4.4617e-02, -2.1677e-02]],
[[ 3.5697e-02, -3.0053e-02, -6.4876e-02],
[-3.6717e-02, -1.2391e-02, -4.1798e-02],
[-2.6112e-02, -6.0833e-02, -6.9750e-02]],
...,
[[ 7.0786e-03, -5.7903e-02, -5.5033e-02],
[-4.7037e-02, -7.8996e-02, -3.0498e-02],
[-6.4429e-02, -1.2210e-03, -7.7122e-02]],
[[-6.9042e-02, -7.7964e-02, -6.3096e-02],
[ 3.2866e-02, -6.0116e-02, -2.5262e-02],
[-1.0719e-02, 2.4929e-02, -3.2025e-02]],
[[-2.2069e-02, 1.7265e-02, 4.9760e-02],
[-7.8188e-02, -8.3043e-03, 1.2801e-02],
[-6.6582e-02, -4.0071e-02, 2.6173e-02]]],
...,
[[[-2.7554e-02, -1.9788e-02, -1.6460e-02],
[-2.0274e-02, -4.6307e-02, -1.2933e-02],
[ 1.9224e-02, 8.8940e-03, 2.0408e-02]],
[[-6.0393e-02, -4.9516e-02, -6.2075e-02],
[ 8.1339e-03, -1.2870e-02, -3.9226e-02],
[-7.8383e-02, -3.2933e-02, -3.1997e-02]],
[[-4.6498e-04, -3.5695e-03, -8.8145e-03],
[-2.9272e-02, -6.7960e-02, -2.6605e-02],
[-1.7449e-02, -3.3339e-02, -4.5673e-02]],
...,
[[-2.3190e-02, 1.5697e-02, -1.3621e-02],
[-3.9511e-02, 2.4590e-02, -5.3533e-02],
[-1.8605e-02, 1.3869e-02, 1.8185e-02]],
[[-2.1172e-02, -9.0145e-02, 2.9266e-02],
[-1.8101e-02, -2.4751e-02, 1.3206e-02],
[ 3.6645e-03, -5.5982e-02, -4.5927e-02]],
[[-7.5826e-02, -3.0295e-02, -7.4960e-02],
[-5.6716e-02, -6.3339e-02, -3.3288e-02],
[-4.3073e-03, 2.9600e-02, 1.6011e-02]]],
[[[-9.2264e-02, -5.7251e-02, -1.3349e-01],
[ 4.0072e-02, -1.6681e-01, -2.4926e-02],
[ 8.6269e-03, -2.9756e-03, 1.1247e-01]],
[[ 1.1278e-02, 2.2770e-01, 1.0637e-01],
[ 2.1892e-02, -3.4646e-02, -5.2008e-02],
[-2.1264e-02, -6.3060e-02, -8.6048e-02]],
[[ 3.0332e-02, 6.1380e-02, 1.3359e-02],
[ 2.2208e-01, 2.4077e-01, 1.6414e-01],
[ 1.6874e-02, 1.4207e-01, 7.0106e-02]],
...,
[[ 5.3013e-02, 1.3062e-02, -7.8441e-02],
[-6.2852e-03, 3.8725e-02, 5.4856e-02],
[-6.7358e-02, -7.7894e-04, 5.4953e-02]],
[[ 6.3726e-02, 2.1840e-02, -7.8111e-02],
[ 2.9160e-02, -3.8670e-02, -2.3099e-01],
[ 1.3812e-01, 1.4945e-01, -8.7929e-02]],
[[-1.1779e-01, -1.3741e-01, -8.5405e-02],
[ 5.0275e-03, -1.3993e-01, -1.0505e-01],
[-2.3879e-02, -1.9678e-01, -1.8763e-01]]],
[[[-8.0733e-03, -3.3905e-03, 1.0432e-01],
[-9.1177e-02, -5.7997e-04, -1.9422e-01],
[ 1.3891e-01, -5.0020e-02, -6.9154e-02]],
[[-3.9204e-02, -7.9488e-04, -1.2960e-02],
[ 6.8809e-02, -6.3275e-02, -1.0106e-02],
[-2.8642e-02, 4.5549e-02, 8.9860e-03]],
[[ 3.3819e-02, -8.8262e-03, -1.0914e-01],
[ 6.3235e-02, 2.0745e-02, 1.6028e-02],
[ 5.5446e-02, 1.1735e-01, 1.1790e-01]],
...,
[[ 1.0078e-01, -1.3990e-01, -2.7086e-01],
[ 1.4578e-01, -8.2395e-02, -2.2490e-01],
[ 8.1702e-02, -1.2346e-01, -3.8225e-02]],
[[-1.0775e-02, -9.0836e-02, 1.1284e-01],
[-1.8424e-01, -2.0947e-01, -2.1780e-01],
[ 6.2989e-02, 2.6555e-02, -7.3723e-02]],
[[-9.2946e-02, -3.5436e-02, 1.7855e-01],
[-3.4277e-01, -5.4578e-02, -6.6039e-02],
[ 6.6520e-02, -3.1352e-02, -3.0146e-03]]]], device='cuda:0')),
('conv_layers.12.bias',
tensor([-0.0965, -0.0791, -0.0836, 0.2204, 0.2426, 0.0093, -0.0492, -0.1439,
0.0573, -0.0496, 0.0431, 0.0085, 0.0080, -0.0620, 0.0985, -0.0885,
0.0593, -0.0135, -0.0848, -0.1882, -0.0509, 0.0714, 0.0401, 0.0227,
-0.0912, 0.0388, -0.0797, 0.1527, 0.0645, -0.1149, 0.0201, -0.1309],
device='cuda:0')),
('fc_layers.0.weight',
tensor([[ 0.1730, 0.1737, 0.1682, ..., -0.0228, 0.1535, 0.0101],
[-0.0345, -0.0393, -0.0174, ..., 0.0393, -0.0238, 0.0315],
[ 0.0044, -0.0287, -0.0224, ..., -0.0403, -0.0020, -0.0050],
...,
[-0.0699, -0.0408, -0.0943, ..., 0.0130, -0.0079, -0.0280],
[-0.0010, -0.0456, -0.0837, ..., -0.0313, -0.0057, -0.0292],
[-0.0257, 0.0016, -0.0488, ..., -0.0342, 0.0102, 0.0034]],
device='cuda:0')),
('fc_layers.0.bias',
tensor([ 2.6921e-01, -7.3277e-02, -5.4068e-02, 1.9524e-01, -8.5471e-02,
1.3690e-01, 1.5270e-02, -3.5561e-02, -4.7550e-02, 9.4337e-02,
1.4099e-01, 2.2591e-02, 9.1625e-02, 1.8867e-01, -8.0091e-02,
1.9321e-01, -4.6850e-02, 1.3491e-02, -4.9780e-02, -7.9409e-02,
-2.0309e-02, 6.5758e-02, -4.4828e-02, 3.2570e-02, 1.3321e-01,
2.1681e-01, 4.5352e-03, -7.7588e-02, -9.0978e-02, 7.5637e-03,
-1.1793e-01, -7.3295e-03, 2.6757e-01, -1.3697e-02, -5.3069e-02,
-7.4699e-03, 2.5984e-02, -4.3667e-02, -5.6109e-02, -5.5986e-02,
7.4601e-02, -7.2637e-02, -6.2546e-02, 1.1463e-02, -1.4555e-01,
-6.6817e-02, 2.4536e-02, -5.1453e-02, -6.0226e-02, 1.0349e-02,
-6.1413e-02, 8.4456e-02, 1.5365e-01, 2.0548e-01, 5.9736e-04,
-3.8307e-02, 5.4825e-02, 1.3244e-01, -6.6952e-02, 2.8024e-02,
-7.2210e-02, 3.9165e-04, -2.6021e-02, -7.5572e-02, -1.0346e-01,
-1.8371e-02, -2.0498e-02, -3.0314e-02, 6.8811e-02, 7.6515e-02,
-6.8200e-02, 4.5228e-02, -1.6176e-02, -2.2719e-02, 3.1892e-02,
-3.8959e-02, 6.7465e-02, 5.3150e-02, 7.6578e-02, -5.5756e-02,
2.5509e-02, -2.3567e-02, -3.9728e-02, 8.2288e-02, -2.0203e-02,
1.4484e-01, 2.9709e-04, -1.0942e-03, 5.7914e-02, 1.4442e-01,
2.0892e-04, 3.3534e-02, -4.8171e-02, -2.5634e-02, -9.5752e-02,
-3.2042e-02, 3.0661e-02, 1.4038e-02, -1.1821e-02, -4.6252e-02,
-4.2756e-02, 2.0319e-02, 1.9529e-01, -7.7924e-02, -1.4541e-02,
-3.1989e-02, 3.0747e-03, 8.2944e-02, -1.0135e-02, -8.5780e-02,
8.4992e-02, -3.4151e-03, -4.6953e-02, 1.2339e-01, 2.1640e-01,
9.8832e-02, -4.2553e-02, 4.2601e-02, 2.0050e-02, 1.5316e-01,
1.2756e-01, 1.0408e-01, -5.9964e-02, -4.9039e-02, -5.5005e-02,
-2.8679e-02, -5.4390e-02, 8.5000e-02, -7.9710e-02, -3.0660e-02,
1.9941e-01, -5.7621e-02, 8.5343e-02, 9.9540e-02, -8.1599e-02,
1.5005e-01, 2.4118e-02, -2.7270e-02, -1.0355e-01, 6.2353e-03,
-3.3590e-02, -3.9576e-02, 2.0627e-01, -2.7468e-02, 2.9328e-01,
-4.7480e-02, 8.7987e-03, -6.3090e-02, -8.5421e-02, -3.8656e-03,
2.7179e-02, -8.0824e-03, -6.5520e-02, 4.4898e-03, 3.4182e-01,
-8.2514e-02, 1.0987e-01, -3.7547e-03, 1.9862e-02, 8.9964e-02,
-8.4985e-03, -6.5597e-02, -8.4227e-02, -2.0651e-02, 8.4188e-02,
1.4545e-02, 1.8733e-01, 1.3402e-01, -8.2125e-02, -4.4670e-02,
-8.5332e-03, 9.4724e-02, -4.5332e-02, 6.7519e-02, -3.4476e-02,
-5.5496e-02, -8.0065e-02, -9.3273e-03, -5.6827e-02, 1.1733e-02,
1.2473e-01, 7.6863e-02, 3.6777e-02, 2.8112e-02, -1.2604e-01,
2.5281e-02, 6.6926e-02, 9.3122e-03, -7.7683e-02, 1.1754e-01,
1.7614e-01, -6.9454e-02, 4.7577e-04, 1.0581e-01, 1.2023e-01,
-2.8479e-02, -5.6079e-02, -6.8688e-02, 3.5854e-03, 1.9739e-01,
-5.6146e-02, -6.1615e-02, 5.9744e-02, -8.7605e-02, 1.1714e-02,
-4.8972e-02, 1.0678e-01, -8.6421e-03, 1.4566e-02, -6.4719e-02,
-2.5109e-02, 3.6399e-02, -8.8483e-02, 1.0280e-01, -3.5295e-04,
5.4294e-02, -1.0203e-01, -5.5900e-02, -2.2589e-02, -3.4452e-02,
-9.6447e-03, -2.0737e-02, -7.5055e-02, 3.1599e-02, -4.6377e-02,
-3.5242e-02, -7.4156e-03, -1.2004e-03, -7.7189e-02, 4.2643e-02,
-9.2853e-02, -2.1827e-02, -2.2879e-02, -3.8161e-02, -9.6846e-02,
-3.2755e-02, -5.2408e-02, -4.2639e-03, -4.9048e-02, -4.9256e-02,
1.5339e-03, 8.3496e-02, 1.2905e-01, -1.2764e-02, -7.1751e-02,
-5.8355e-02, 2.4672e-01, 1.2638e-03, 1.9545e-03, -5.0857e-02,
-4.0469e-02, -5.8560e-02, -7.4117e-02, 4.7055e-02, -3.9692e-02,
-6.6699e-02], device='cuda:0')),
('fc_layers.2.weight',
tensor([[-0.0748, -0.0681, 0.0350, ..., 0.0026, -0.0023, -0.0012],
[ 0.0906, -0.0052, 0.0295, ..., 0.0182, 0.0344, -0.0035],
[ 0.0037, -0.0090, -0.0270, ..., 0.0181, -0.0327, -0.0272],
...,
[-0.0837, -0.0199, -0.0064, ..., 0.0284, -0.0195, -0.0748],
[ 0.0311, -0.0053, 0.0048, ..., -0.0169, -0.0310, 0.0557],
[-0.0963, -0.0814, -0.0756, ..., -0.0806, -0.0201, -0.0569]],
device='cuda:0')),
('fc_layers.2.bias',
tensor([-0.0457, 0.0188, 0.1552, 0.1725, 0.0768, 0.0375, -0.1205, -0.2278,
-0.1069, -0.0719], device='cuda:0'))])},
{'ratio': 0.25,
'bias': 32,
'train_losses': [277.5726136661949,
246.76721250220328,
217.55644401601054,
202.30030791880156,
193.79293381834947,
187.16485911888603,
184.83406204806988,
181.04838355819177,
179.40075896922207,
177.0244756843407,
174.90931846354852,
171.74831506552704,
171.3498843022042,
170.68696618662662,
168.95542285579663],
'test_losses': [262.80505554348815,
232.33772577958948,
205.36929546617995,
196.7971724061405,
189.24025261635873,
182.9469838142395,
179.53693862989837,
176.41375040540507,
174.58643403707765,
175.19444182339836,
169.40196472055771,
168.91480136385152,
166.00268106834562,
166.4546140128491,
164.73339196747423],
'model_state_dict': OrderedDict([('conv_layers.0.weight',
tensor([[[[ 0.0100, 0.2946, -0.0551],
[-0.0103, 0.1757, 0.0156],
[-0.0157, -0.2399, -0.2463]],
[[-0.0497, 0.2937, 0.1340],
[ 0.0525, 0.1048, 0.0383],
[-0.0889, -0.1955, -0.1954]],
[[ 0.1956, -0.0904, 0.1445],
[-0.0478, 0.1098, -0.0610],
[ 0.0140, -0.1605, -0.1123]]],
[[[-0.0942, 0.1871, 0.2544],
[-0.2052, 0.1142, -0.0524],
[-0.0788, 0.0734, -0.0346]],
[[-0.0078, 0.0870, 0.2756],
[-0.1114, -0.0728, 0.1208],
[-0.0016, -0.1382, 0.0007]],
[[-0.1013, 0.1862, 0.2446],
[-0.2185, -0.1677, -0.1301],
[ 0.0921, -0.2298, -0.0284]]],
[[[-0.3232, 0.0855, -0.0620],
[-0.1389, 0.1619, 0.0552],
[ 0.1118, -0.0442, 0.1028]],
[[-0.2410, -0.2306, 0.1264],
[-0.2207, 0.0094, 0.2777],
[ 0.0181, 0.1348, 0.1690]],
[[-0.0371, -0.0149, 0.0239],
[-0.0867, -0.0771, 0.1022],
[ 0.0816, -0.1765, 0.1592]]],
[[[-0.1416, -0.1806, -0.1124],
[-0.0726, 0.1915, -0.0175],
[ 0.2385, -0.0242, 0.0610]],
[[-0.2959, -0.1533, -0.3018],
[ 0.1708, 0.0367, 0.0786],
[ 0.1880, 0.2002, 0.1815]],
[[-0.0638, -0.0259, 0.0660],
[ 0.0171, -0.1553, -0.1196],
[ 0.0598, 0.1031, 0.1595]]],
[[[-0.2064, -0.0471, -0.0389],
[ 0.1560, 0.0035, 0.0254],
[ 0.2496, 0.0175, -0.1141]],
[[-0.1352, -0.0640, -0.1495],
[ 0.1372, 0.1730, -0.0914],
[ 0.1309, 0.1874, -0.0182]],
[[-0.0334, -0.2022, -0.1155],
[ 0.1338, 0.0200, 0.0489],
[ 0.1638, 0.0171, -0.1710]]],
[[[ 0.0659, -0.0450, 0.1114],
[ 0.0533, 0.0703, -0.1302],
[ 0.0565, 0.0108, -0.0315]],
[[ 0.1588, -0.1737, -0.1152],
[-0.1245, -0.0479, -0.2689],
[ 0.0661, -0.1127, -0.1191]],
[[ 0.0877, -0.0800, 0.1104],
[ 0.1652, 0.1190, 0.0133],
[-0.0049, -0.1084, 0.2131]]],
[[[-0.1805, 0.1108, 0.1674],
[-0.0404, -0.1076, -0.1366],
[ 0.0362, 0.1183, 0.1758]],
[[-0.0124, 0.0087, -0.2291],
[ 0.0564, 0.0830, -0.2240],
[ 0.1620, 0.3663, -0.0021]],
[[-0.1748, -0.2514, -0.1544],
[-0.0093, 0.0846, -0.0146],
[ 0.2122, -0.0502, 0.0685]]],
[[[-0.1365, 0.1944, 0.1052],
[-0.2053, -0.0875, 0.2700],
[-0.3219, 0.1127, 0.0716]],
[[-0.0174, 0.0493, 0.2416],
[-0.0731, 0.0870, 0.0350],
[-0.3520, 0.1140, 0.0127]],
[[-0.0032, -0.1309, 0.1441],
[ 0.1040, -0.1640, 0.0579],
[-0.1093, 0.0465, 0.0391]]]], device='cuda:0')),
('conv_layers.0.bias',
tensor([ 0.1997, -0.3920, 0.0846, 0.1822, 0.2618, -0.2506, -0.3475, 0.2582],
device='cuda:0')),
('conv_layers.2.weight',
tensor([[[[ 1.3620e-02, 1.2428e-01, 9.4496e-02],
[ 8.3724e-02, 9.2961e-02, 1.0677e-01],
[ 4.1327e-02, 1.0991e-01, 5.1422e-02]],
[[ 4.9765e-02, -1.4297e-01, -1.1900e-01],
[-1.4616e-01, -9.5421e-02, -6.4602e-02],
[-1.3005e-01, -1.9082e-01, -1.6324e-03]],
[[-3.2978e-02, -1.4633e-01, 1.4895e-02],
[ 5.9082e-03, -7.4056e-02, -1.2950e-01],
[-5.4175e-02, -8.0667e-03, -8.6637e-02]],
...,
[[-1.3871e-01, -5.9525e-02, 1.2065e-01],
[-4.8545e-02, -8.7784e-02, -7.9508e-02],
[ 8.5852e-02, 1.1516e-01, -8.8719e-02]],
[[-3.0877e-01, 4.4774e-02, 3.0227e-02],
[ 2.4373e-01, 1.4917e-01, 3.4227e-01],
[-3.2549e-01, -3.9066e-01, -1.6490e-01]],
[[ 9.9129e-02, 5.9744e-02, 1.0095e-01],
[ 5.5123e-02, -1.2709e-03, 1.1084e-02],
[ 6.5302e-02, 1.6085e-01, 1.2191e-01]]],
[[[ 1.0253e-01, 2.3690e-02, 1.1983e-02],
[ 9.5141e-02, -7.6898e-02, 8.7611e-02],
[-4.5885e-02, 8.5318e-02, -1.2567e-01]],
[[-8.1624e-02, -1.1154e-01, -3.6365e-01],
[-1.6359e-01, 4.1697e-02, -1.4798e-02],
[-1.4143e-01, -1.0329e-01, 1.5000e-02]],
[[ 6.7576e-02, 1.5296e-03, 1.9687e-02],
[ 6.6814e-02, 1.7123e-01, 2.9348e-02],
[-1.3507e-01, 3.9610e-02, 9.9019e-02]],
...,
[[-9.0285e-02, 3.6237e-02, -1.5317e-01],
[-1.9042e-01, 5.5356e-03, -2.6552e-02],
[-8.7599e-03, -1.7698e-01, -1.0664e-01]],
[[-1.8344e-01, -1.4017e-01, 2.6048e-02],
[-1.1049e-02, -2.2969e-01, 1.2056e-01],
[ 9.8103e-03, -9.2729e-02, -2.4637e-01]],
[[ 1.1746e-01, 5.9602e-02, -3.5500e-02],
[ 6.0089e-02, 9.2425e-02, 2.2169e-02],
[ 4.5763e-02, -4.6459e-02, 7.7972e-02]]],
[[[ 3.6589e-02, 1.0564e-01, 1.8862e-01],
[ 4.1278e-02, 6.7792e-02, 1.1472e-01],
[ 1.2261e-01, -9.0777e-02, -5.1172e-02]],
[[-1.5987e-01, -6.4698e-02, -2.5587e-02],
[-7.5485e-02, -1.3297e-01, -1.5434e-01],
[-1.4512e-01, -7.3237e-02, -1.8963e-02]],
[[ 8.1489e-02, 1.2597e-01, -1.2766e-01],
[ 2.9506e-02, -1.4454e-02, 1.6428e-03],
[ 8.5956e-02, 1.2382e-01, 1.2809e-01]],
...,
[[ 6.5401e-02, 3.1028e-02, 2.1465e-02],
[-5.7852e-02, 1.6189e-01, -1.1738e-01],
[-7.1305e-03, 9.9335e-02, 1.6592e-02]],
[[ 1.6559e-02, 6.3352e-02, -4.7754e-04],
[ 1.0858e-01, 6.4050e-02, -1.9046e-02],
[ 1.9536e-02, -8.8489e-02, -1.7785e-01]],
[[-3.7858e-01, -2.3954e-02, 1.4447e-01],
[-5.1044e-01, -2.8201e-01, -8.7746e-02],
[-2.8859e-01, -3.3321e-01, -6.1923e-02]]],
...,
[[[ 2.3539e-01, -4.3586e-02, -1.5912e-01],
[-8.1821e-02, -3.7108e-01, -2.9699e-01],
[-2.9236e-01, -3.8225e-01, -8.2883e-02]],
[[ 1.7396e-01, -8.0602e-02, -1.2286e-01],
[ 6.6201e-02, -5.9797e-02, -2.1049e-01],
[-2.4855e-01, -2.3690e-01, -1.3369e-01]],
[[-1.5602e-01, -1.2986e-01, 1.8083e-01],
[-4.0959e-02, 5.0537e-02, 1.9036e-01],
[ 7.0890e-02, 1.0707e-01, 2.4022e-01]],
...,
[[-1.1826e-01, 1.1606e-01, 2.6488e-02],
[ 3.7691e-02, 6.1375e-02, -2.1623e-01],
[-1.1368e-01, -1.3586e-01, -2.5514e-02]],
[[-2.6837e-01, -2.5673e-01, -6.5848e-02],
[-7.8424e-02, 6.7750e-02, 2.4033e-02],
[-6.1688e-02, -1.3310e-01, 2.1515e-02]],
[[ 3.7126e-02, -4.2009e-02, 1.5680e-01],
[ 1.7710e-02, 1.1312e-01, -1.9599e-02],
[-4.7111e-02, -2.0200e-02, 7.5489e-02]]],
[[[ 1.0903e-01, -5.3315e-02, -4.6854e-02],
[-1.9544e-01, -2.0314e-01, -1.6132e-01],
[-1.1834e-02, -2.8218e-01, -1.5662e-01]],
[[-2.0160e-01, -1.3538e-01, -8.2634e-02],
[-1.0053e-01, -1.5197e-01, -4.9511e-02],
[ 7.0280e-02, -9.9140e-02, -1.1851e-02]],
[[-3.5792e-02, 2.4146e-02, -5.4181e-02],
[ 4.0509e-02, -1.4920e-01, -1.7348e-01],
[ 1.6286e-01, 1.1665e-01, -8.1026e-02]],
...,
[[-3.0149e-02, 9.1533e-02, 2.6796e-02],
[-6.9416e-02, -1.0534e-01, 6.2502e-02],
[-1.5096e-01, -3.6150e-02, 3.2228e-02]],
[[-1.3174e-01, -2.9837e-01, -2.1005e-01],
[-2.3175e-02, -4.0121e-02, -7.1504e-02],
[-8.8302e-02, -2.3963e-01, -1.0387e-01]],
[[-1.3962e-01, -1.4934e-01, -3.8099e-02],
[-9.4508e-03, -2.1744e-01, -2.1883e-01],
[ 1.7506e-01, -1.8988e-01, -2.3017e-01]]],
[[[-5.3139e-02, -1.0287e-01, 3.2151e-02],
[-1.4373e-01, -1.6874e-01, -1.7721e-01],
[-1.9132e-01, -1.6119e-01, 1.8198e-02]],
[[ 8.2193e-02, -2.3693e-02, -2.5836e-03],
[-3.0239e-01, -4.4161e-01, -3.8120e-01],
[-3.2274e-01, -3.6436e-01, -2.1914e-01]],
[[-5.8420e-02, -4.4915e-02, -1.6489e-01],
[-2.8776e-02, 2.3495e-02, 5.7767e-02],
[ 9.9803e-03, 7.4587e-02, -4.9639e-02]],
...,
[[ 5.7738e-02, -7.8450e-02, -3.5046e-02],
[-1.2820e-01, -6.8160e-02, -8.3358e-03],
[ 4.3206e-03, -2.8340e-02, -1.0847e-01]],
[[-4.3259e-01, -3.0472e-01, -2.6520e-01],
[-1.0133e-02, 8.4348e-02, -9.8978e-04],
[-1.1112e-01, -1.1413e-01, -4.1368e-03]],
[[ 1.9892e-01, 8.6754e-02, 9.3385e-02],
[ 1.2151e-01, -9.2621e-03, 1.1726e-01],
[ 2.9722e-02, 9.9780e-03, 7.7806e-02]]]], device='cuda:0')),
('conv_layers.2.bias',
tensor([ 0.0006, 0.1913, -0.2772, -0.2821, 0.1603, 0.2709, 0.0925, 0.1775,
0.1763, 0.0316, 0.2402, 0.2093, -0.1062, 0.0081, 0.0818, 0.1868],
device='cuda:0')),
('conv_layers.5.weight',
tensor([[[[ 5.0392e-02, 1.2658e-01, 1.0084e-01],
[-1.7678e-01, 9.1186e-02, 1.3758e-01],
[-2.4461e-01, -7.9450e-02, 7.6789e-02]],
[[-4.2914e-02, 1.0882e-01, 1.4280e-01],
[-9.5188e-02, 7.2392e-02, 1.2642e-01],
[-3.2784e-01, -7.1177e-02, 7.8314e-02]],
[[-2.5825e-01, -3.1372e-01, 2.4600e-01],
[-8.5868e-02, -9.7883e-02, -9.8528e-02],
[-1.8689e-01, -1.6173e-01, -1.4444e-03]],
...,
[[-1.8092e-01, -1.4880e-01, -2.3322e-04],
[-4.0520e-02, -7.7332e-02, -9.4358e-02],
[-2.5369e-02, -8.3955e-02, -3.8562e-02]],
[[-1.0389e-01, -4.1002e-02, -3.5370e-02],
[ 1.6509e-01, -1.8846e-01, -9.6857e-02],
[ 7.1724e-02, 1.2505e-01, -1.0701e-01]],
[[-7.0491e-03, 6.9867e-02, -3.0277e-02],
[-5.8183e-03, 3.4707e-02, -1.1641e-01],
[-4.1027e-03, -3.9969e-02, -8.8417e-02]]],
[[[ 7.0288e-02, -5.9709e-02, -2.3543e-01],
[ 1.7748e-01, 1.7782e-01, -6.9486e-02],
[ 5.1970e-02, 8.2703e-02, 4.3564e-02]],
[[ 1.1053e-01, -3.1316e-02, 9.6945e-02],
[ 2.7621e-01, 2.2072e-01, -1.5577e-01],
[ 1.1073e-01, 1.1881e-01, -6.8022e-02]],
[[-1.8702e-01, -6.6889e-03, -5.7075e-02],
[ 1.7796e-01, -7.9653e-02, -1.2568e-02],
[ 1.3635e-01, 9.2580e-02, -8.7388e-02]],
...,
[[-7.8281e-02, -1.9344e-01, 5.1865e-02],
[-1.3144e-01, -2.4943e-01, -1.8934e-01],
[-4.0558e-03, -7.3389e-02, -4.2413e-01]],
[[ 2.7881e-02, 4.1947e-02, 8.4644e-02],
[-3.0875e-01, 7.0583e-02, 1.8692e-01],
[-7.3956e-04, -4.2595e-02, 1.3755e-01]],
[[-7.3720e-02, 2.9216e-02, -2.4104e-01],
[ 1.5238e-02, 2.4822e-02, -2.6480e-01],
[ 9.1284e-02, 1.1615e-01, -5.1957e-02]]],
[[[ 1.1838e-01, 1.3262e-01, 1.8711e-01],
[ 2.3331e-03, 5.5525e-02, 6.8091e-02],
[-2.2429e-01, -1.3275e-01, 5.5690e-02]],
[[-4.1477e-02, -8.2217e-02, -6.6881e-02],
[-1.6541e-01, -2.2654e-02, -7.9922e-02],
[-1.4719e-01, -8.5056e-02, -8.8946e-02]],
[[-1.4874e-01, -3.3334e-02, 7.7768e-02],
[-1.3536e-01, -2.0818e-03, 6.8249e-02],
[-6.7731e-02, -2.8996e-02, 1.1097e-01]],
...,
[[ 2.9121e-02, -1.2316e-01, -9.9007e-03],
[-8.9639e-03, 1.0914e-01, 1.8664e-01],
[-5.5296e-02, 3.9526e-02, 1.1707e-01]],
[[-9.5623e-02, -8.3654e-02, 2.1194e-01],
[-5.9065e-02, -1.2084e-01, 1.2827e-01],
[-3.1609e-02, -1.3405e-01, -3.1201e-02]],
[[ 4.7501e-02, -3.8945e-02, -9.0152e-02],
[-2.4476e-02, 6.4613e-02, 1.5503e-01],
[-1.6024e-02, -6.8101e-02, 1.0450e-01]]],
...,
[[[ 9.2726e-02, -2.1649e-01, -2.3961e-02],
[ 9.8988e-02, -1.8227e-02, -7.6664e-02],
[ 2.3705e-01, -4.6711e-02, -2.3068e-01]],
[[ 1.1139e-01, 1.3256e-01, 6.9309e-02],
[-1.6710e-01, -6.8109e-02, -3.5791e-02],
[-1.3628e-01, -1.1905e-01, -3.8208e-02]],
[[ 1.2303e-01, -3.8577e-02, -1.0101e-01],
[ 7.3648e-02, 2.6425e-02, -1.2109e-01],
[ 8.0301e-02, 1.3970e-01, -5.5345e-02]],
...,
[[-1.2973e-01, -7.5642e-02, -5.9359e-03],
[-2.2731e-01, 7.7089e-02, -2.3488e-02],
[-2.2651e-01, -1.8540e-01, 8.5991e-02]],
[[ 7.1560e-02, -8.2838e-02, 2.4663e-02],
[ 1.4722e-01, 1.3382e-02, -4.5012e-03],
[-1.2017e-01, 9.7113e-02, -2.0050e-02]],
[[-1.4880e-02, -2.0203e-01, -2.6928e-01],
[-1.2229e-01, -1.7041e-01, -2.6815e-01],
[-2.6299e-02, -2.3990e-01, -1.7676e-01]]],
[[[-3.4008e-02, -6.5010e-03, -1.3789e-01],
[-2.8353e-01, -1.5613e-02, -2.8818e-02],
[-1.2319e-01, -6.1440e-03, -1.0682e-01]],
[[-6.4701e-02, -1.8173e-01, -8.2961e-02],
[-8.0759e-02, -1.9147e-02, -6.0232e-02],
[-4.7838e-02, -7.4318e-02, -2.9540e-02]],
[[-6.9723e-03, -1.9221e-02, 8.8207e-02],
[-6.9025e-02, -2.8972e-02, -2.1124e-02],
[-7.5736e-03, -1.5044e-02, -2.7623e-02]],
...,
[[-1.3795e-01, -1.3479e-01, -7.3509e-02],
[-7.9985e-02, -1.0779e-01, -1.5466e-01],
[-4.9360e-03, -4.8081e-02, 6.4789e-02]],
[[-1.2585e-01, -6.2250e-02, -4.3213e-02],
[-1.4893e-01, -1.4588e-01, -8.0716e-02],
[-1.6967e-01, -1.4915e-01, -2.4132e-01]],
[[-7.4894e-02, -1.3104e-01, -7.9569e-02],
[-8.2506e-02, -2.7744e-02, -1.8330e-01],
[-6.7307e-02, -1.8797e-01, 1.4542e-02]]],
[[[ 6.5544e-02, 7.2055e-02, 6.0830e-03],
[-1.0384e-01, -1.3322e-01, -8.1981e-02],
[-2.0941e-01, -2.9606e-01, -2.4238e-01]],
[[-2.1970e-02, 9.5156e-02, 9.9025e-03],
[-1.8492e-02, 1.2611e-01, 1.5469e-02],
[-2.2390e-02, -1.4733e-01, -2.8699e-02]],
[[ 8.1817e-02, 1.9060e-01, 1.1321e-01],
[ 9.5898e-02, 1.1840e-01, 1.3832e-01],
[ 1.7444e-02, -1.2182e-02, -5.6500e-02]],
...,
[[-5.6279e-02, -8.9554e-03, -1.2282e-01],
[ 5.8651e-02, 6.1565e-02, -1.1306e-03],
[ 1.5420e-01, -4.6325e-04, -1.1401e-02]],
[[-2.5455e-01, -2.8787e-01, -2.9078e-01],
[-1.2216e-01, -1.3948e-01, -1.0348e-01],
[ 4.6911e-03, -1.4742e-01, -1.5727e-01]],
[[-5.2841e-02, -2.4617e-02, 4.8205e-04],
[-5.3716e-02, -1.3026e-02, 5.6758e-02],
[ 8.4339e-02, 9.7325e-02, 1.0109e-01]]]], device='cuda:0')),
('conv_layers.5.bias',
tensor([ 0.0288, 0.0871, -0.0584, 0.2013, 0.1547, 0.1233, 0.0719, 0.0686,
-0.0724, 0.2317, -0.0120, 0.1666, 0.0855, -0.0418, 0.2529, 0.0498,
0.1376, -0.0860, 0.1080, 0.0156, -0.0060, 0.0433, 0.0948, 0.2631,
0.1884, -0.0218, 0.1699, -0.2254, -0.1436, -0.0216, -0.0635, 0.0718],
device='cuda:0')),
('conv_layers.7.weight',
tensor([[[[ 0.0552, -0.0157, -0.0032],
[-0.0825, -0.0274, 0.1041],
[ 0.0965, -0.0272, 0.0145]],
[[ 0.0421, 0.2242, 0.0318],
[ 0.0739, -0.1977, -0.1050],
[ 0.0186, -0.1697, -0.1935]],
[[ 0.0812, 0.1896, -0.0478],
[-0.2072, -0.0352, -0.1347],
[-0.1351, -0.0440, -0.2481]],
...,
[[-0.0716, -0.0562, 0.0672],
[-0.1085, -0.0158, -0.0189],
[-0.1147, 0.0392, -0.0715]],
[[ 0.1403, 0.1035, 0.0885],
[ 0.1557, -0.0609, -0.0377],
[ 0.1448, -0.0165, 0.0325]],
[[ 0.0229, 0.0800, 0.1726],
[-0.0043, -0.0568, 0.0536],
[-0.0959, -0.2296, 0.0193]]],
[[[ 0.0833, -0.0230, -0.1522],
[-0.1499, -0.1589, -0.0567],
[-0.0932, -0.1788, -0.0767]],
[[-0.0131, 0.0053, 0.0971],
[ 0.1062, -0.2245, -0.0858],
[ 0.0565, -0.0415, -0.0393]],
[[ 0.0489, -0.0377, -0.0285],
[ 0.0165, -0.0568, -0.0638],
[ 0.0228, -0.0838, 0.0786]],
...,
[[ 0.1044, 0.1224, -0.1023],
[-0.0755, 0.1544, 0.1679],
[-0.0159, 0.0483, 0.2011]],
[[-0.0822, 0.0032, 0.0348],
[-0.0352, 0.0470, 0.0649],
[-0.0086, 0.0955, 0.0796]],
[[-0.0047, -0.1749, -0.0068],
[ 0.0151, -0.1070, -0.0479],
[-0.1325, -0.1220, -0.0400]]],
[[[-0.0018, -0.0642, -0.0731],
[-0.0393, -0.0059, -0.0203],
[-0.0838, 0.0446, -0.0226]],
[[-0.0119, 0.0543, -0.0451],
[-0.0136, -0.0348, -0.0456],
[-0.0385, -0.0920, -0.0323]],
[[ 0.0345, 0.0015, 0.0414],
[-0.0604, -0.0641, -0.0210],
[ 0.0017, -0.0423, -0.0122]],
...,
[[-0.0256, 0.0521, -0.0715],
[-0.0130, -0.0314, 0.0512],
[-0.0447, 0.0058, -0.0354]],
[[-0.0415, 0.0075, 0.0461],
[-0.0280, 0.0343, -0.0361],
[-0.0047, 0.0396, 0.0313]],
[[ 0.0033, 0.0042, -0.0480],
[-0.0452, 0.0303, 0.0153],
[-0.0147, -0.0409, -0.0425]]],
...,
[[[ 0.1223, 0.0752, -0.0400],
[ 0.0070, 0.0451, -0.0479],
[ 0.1024, -0.1135, -0.0969]],
[[-0.3602, 0.0262, 0.0905],
[ 0.1161, -0.2258, 0.2685],
[ 0.1489, 0.0331, -0.2031]],
[[-0.1108, 0.0427, 0.0871],
[-0.1146, -0.1286, 0.0110],
[-0.0031, -0.0932, -0.0789]],
...,
[[-0.0173, -0.0039, -0.0132],
[-0.1359, 0.0522, -0.0045],
[-0.0032, -0.0042, 0.1475]],
[[ 0.0119, 0.0290, -0.0516],
[-0.0362, -0.0502, 0.0187],
[ 0.0100, 0.1028, -0.0383]],
[[ 0.0147, 0.0618, 0.0448],
[-0.0431, 0.0039, -0.0038],
[-0.0303, -0.0173, 0.1168]]],
[[[-0.1592, -0.1619, -0.1738],
[-0.1039, 0.0388, -0.0830],
[-0.0699, 0.1148, -0.0051]],
[[ 0.0066, -0.0385, -0.0824],
[ 0.0805, 0.0816, 0.1277],
[ 0.0620, -0.1153, -0.0472]],
[[-0.2189, -0.2362, -0.1990],
[-0.1490, -0.1618, -0.2127],
[-0.0590, 0.1944, 0.1341]],
...,
[[-0.1047, -0.0533, -0.0320],
[-0.0196, 0.0542, 0.0456],
[-0.0586, -0.0704, 0.0452]],
[[ 0.0120, -0.0361, -0.0110],
[-0.0230, 0.0970, 0.0712],
[-0.0957, 0.0171, -0.0160]],
[[-0.1684, -0.0659, -0.1555],
[-0.0971, -0.1775, -0.3067],
[ 0.0787, 0.0110, -0.1373]]],
[[[-0.1942, -0.0268, 0.0897],
[-0.2515, -0.0970, -0.0262],
[-0.2560, -0.0886, -0.1076]],
[[-0.2583, -0.0972, -0.0416],
[-0.1211, -0.0349, 0.0783],
[ 0.1117, -0.0730, 0.0080]],
[[-0.0534, 0.0210, 0.0350],
[-0.0747, -0.0203, -0.1368],
[ 0.0582, 0.0602, -0.1706]],
...,
[[-0.2707, -0.1342, -0.1972],
[-0.2661, -0.2514, -0.0298],
[-0.0032, -0.1083, -0.1617]],
[[ 0.0540, -0.0199, 0.0612],
[-0.0488, 0.1083, 0.1253],
[-0.1092, -0.0212, 0.0745]],
[[-0.2672, -0.1317, -0.0291],
[-0.3041, -0.0199, 0.0500],
[-0.2615, -0.2162, -0.1542]]]], device='cuda:0')),
('conv_layers.7.bias',
tensor([-0.0508, -0.1617, -0.0142, 0.2107, 0.0958, 0.0795, -0.0424, -0.2118,
0.0423, -0.0359, -0.0097, 0.1985, -0.1253, -0.0880, 0.0811, 0.0741,
0.1126, 0.0321, -0.0757, -0.0569, 0.0472, 0.0772, 0.1125, -0.0267,
0.1383, 0.1249, 0.0075, -0.0163, 0.0775, 0.0895, 0.0406, 0.0398],
device='cuda:0')),
('conv_layers.10.weight',
tensor([[[[-1.0023e-01, -1.8911e-01, -6.5813e-03],
[ 5.8600e-02, -1.4139e-01, 1.4743e-02],
[ 1.5576e-01, 5.4111e-02, -1.1950e-01]],
[[ 6.2562e-02, -1.6372e-02, 3.9491e-02],
[ 1.4276e-02, 1.0774e-01, 1.0538e-01],
[ 6.4405e-02, -6.6940e-02, 5.0552e-02]],
[[-9.5339e-03, 6.5975e-03, -5.1172e-02],
[ 2.4515e-02, 5.6023e-02, 1.5870e-02],
[-6.5802e-03, 5.0329e-02, -2.7831e-02]],
...,
[[ 1.0342e-01, 2.5613e-01, 1.7709e-01],
[-3.0801e-02, 3.7551e-02, -1.0115e-01],
[-9.1173e-02, -2.2143e-01, -3.0019e-01]],
[[ 5.8362e-02, 1.3732e-01, 1.8448e-01],
[-5.4848e-02, -6.8973e-02, -5.0266e-02],
[ 1.7038e-01, -8.8331e-02, -9.8406e-02]],
[[ 8.9980e-02, -5.1111e-03, 5.4061e-02],
[ 3.2140e-02, 9.9803e-02, 4.5775e-02],
[ 8.2465e-02, -1.3075e-01, -2.7994e-01]]],
[[[-1.8719e-01, -2.0299e-01, -2.8932e-04],
[-2.3187e-02, 1.4766e-02, -2.0849e-01],
[ 1.9167e-02, -6.7776e-02, -1.0169e-01]],
[[ 1.7408e-02, 1.0732e-01, -8.6681e-02],
[-5.1928e-02, 5.2057e-02, 8.3302e-02],
[ 2.8323e-03, -5.4799e-02, -9.0793e-02]],
[[ 3.7756e-02, -1.3375e-02, -2.2343e-02],
[ 6.9789e-02, 5.6640e-03, -3.2277e-02],
[ 3.1008e-02, -8.7412e-03, 5.1621e-02]],
...,
[[-5.5733e-02, 5.1384e-02, -3.7620e-02],
[ 5.3294e-02, -1.4317e-01, -8.0452e-02],
[-3.2728e-02, -2.2474e-01, -3.5007e-01]],
[[-1.2117e-01, -8.2694e-02, 5.3898e-02],
[-7.7282e-02, -6.3814e-02, 1.0433e-01],
[ 4.6411e-02, -2.2845e-01, -3.6513e-02]],
[[-1.7858e-01, -2.3516e-01, 7.7510e-02],
[-1.4578e-01, -5.8642e-02, 1.4157e-01],
[-6.7304e-02, 3.4761e-02, 4.6328e-02]]],
[[[-1.2450e-01, -1.4125e-01, -1.7552e-01],
[-1.0422e-01, 8.9168e-02, 7.2998e-02],
[-1.9172e-01, 6.9068e-02, 1.4343e-01]],
[[ 6.7695e-02, -7.6410e-02, -1.3113e-01],
[-1.5986e-02, -1.1362e-01, -1.9604e-01],
[-9.9146e-02, -9.0058e-02, -1.0527e-01]],
[[ 2.6746e-02, -2.7615e-02, 1.3712e-02],
[-3.1592e-02, 6.1626e-02, 2.5418e-02],
[ 5.0718e-02, 4.9314e-02, 2.5193e-02]],
...,
[[ 3.1421e-02, 1.1784e-01, -9.1092e-02],
[ 1.2210e-01, 7.6431e-02, -1.5875e-01],
[ 2.0742e-01, 4.0090e-02, -2.2553e-01]],
[[ 1.3495e-01, 1.2704e-02, -1.3297e-01],
[ 3.8192e-02, 1.8774e-03, -1.1338e-01],
[ 8.9061e-02, 1.7681e-02, -4.6658e-02]],
[[ 1.0188e-01, 3.5601e-02, -4.8503e-02],
[ 3.6170e-02, 5.3056e-02, -1.2802e-01],
[ 1.6649e-01, 3.6032e-01, 1.3424e-01]]],
...,
[[[ 1.7406e-01, 1.0503e-01, 6.9936e-02],
[ 8.0386e-02, 2.4526e-01, 3.2231e-02],
[-2.7084e-02, -6.3210e-04, 1.5768e-01]],
[[ 1.6276e-01, 2.3347e-01, -9.9112e-02],
[ 6.2246e-02, 1.4976e-01, -2.3867e-01],
[-5.1209e-02, 7.0937e-02, -7.9332e-02]],
[[-4.2676e-02, 2.4698e-02, -1.1264e-02],
[-2.5074e-02, 3.0927e-02, -1.1868e-02],
[-4.8760e-02, 2.0739e-02, 7.4642e-02]],
...,
[[-1.2757e-01, -2.6935e-01, -2.3759e-01],
[ 3.9648e-02, -9.0641e-02, -1.2642e-01],
[ 1.7452e-01, 5.9463e-02, 4.1789e-02]],
[[-1.4989e-01, -2.0722e-01, -1.1030e-01],
[-2.8996e-02, -1.4400e-01, -2.2403e-01],
[ 9.1814e-02, -1.2170e-02, -8.5219e-02]],
[[-6.8306e-02, -1.1086e-02, -1.2252e-01],
[-6.8262e-02, -8.7761e-02, -1.6765e-01],
[-5.0273e-02, 2.4462e-02, -4.2858e-02]]],
[[[ 3.4402e-02, 5.6123e-03, -6.3157e-02],
[-3.6208e-02, -4.0164e-02, -2.9146e-01],
[-7.4696e-02, -1.0821e-01, -8.7676e-02]],
[[-9.3153e-02, 7.2812e-02, -1.0901e-01],
[-9.4630e-02, -1.2915e-02, -1.5527e-02],
[ 8.3461e-02, 2.9336e-02, -6.2783e-02]],
[[-3.6266e-02, -5.7263e-02, -6.5639e-02],
[-4.0512e-02, -5.2057e-02, -3.7922e-02],
[ 1.9973e-02, 4.0148e-02, -6.5252e-03]],
...,
[[-1.4295e-01, 5.9844e-03, 7.5349e-02],
[-1.3735e-01, 2.2565e-01, 1.8110e-01],
[-1.2686e-02, 6.5580e-02, 2.9763e-02]],
[[-7.2396e-02, 1.1300e-01, 2.3245e-02],
[ 7.2009e-02, 1.9936e-01, -2.8796e-02],
[ 6.3436e-03, -1.4116e-01, -1.8723e-01]],
[[-7.6212e-03, 3.7087e-02, 1.8395e-02],
[ 8.1898e-02, 1.0430e-01, 8.2670e-04],
[ 1.2765e-01, 7.9813e-02, -1.9260e-01]]],
[[[-2.0530e-01, -1.2451e-01, -1.6383e-01],
[-6.9039e-02, -4.2945e-02, -1.4456e-02],
[-1.0104e-01, -1.3974e-01, -2.7173e-02]],
[[ 1.1503e-01, -2.9574e-02, -7.5397e-02],
[-2.8067e-02, -5.6357e-02, -1.3550e-01],
[-6.6005e-02, -6.1977e-02, -4.6439e-02]],
[[ 5.9948e-02, 5.3026e-02, -4.4878e-02],
[ 4.4134e-02, -2.2072e-03, -2.1431e-02],
[ 2.2679e-02, 1.9698e-02, 5.6714e-03]],
...,
[[ 1.6240e-01, 8.1138e-02, -1.9363e-02],
[-4.7991e-02, -6.1459e-02, -1.2255e-01],
[-1.6270e-01, -2.0818e-01, -1.9232e-01]],
[[-1.4374e-01, -1.6666e-01, -1.0111e-01],
[-1.0775e-01, -3.9803e-01, -2.7377e-01],
[ 9.1345e-02, -1.8753e-01, -1.6952e-01]],
[[ 1.5529e-01, -1.0861e-01, -6.5861e-02],
[ 7.8880e-03, -1.0752e-01, -1.2603e-01],
[ 3.0833e-02, -1.5872e-01, -1.5130e-02]]]], device='cuda:0')),
('conv_layers.10.bias',
tensor([ 0.0991, -0.1144, 0.0546, 0.1736, 0.0069, 0.1066, -0.1240, 0.0177,
-0.0173, -0.0989, 0.1697, -0.0468, -0.1449, -0.0495, 0.0046, -0.0059,
0.0911, -0.0772, -0.0809, 0.1337, -0.1043, 0.0634, 0.1874, -0.0313,
-0.0785, 0.0659, -0.0972, -0.0646, 0.2308, 0.0405, -0.0909, 0.1295],
device='cuda:0')),
('conv_layers.12.weight',
tensor([[[[-1.6137e-01, 6.4741e-03, -1.6751e-02],
[-1.2084e-02, -7.6672e-03, 2.0245e-02],
[-5.5653e-02, -1.0907e-01, -1.1004e-01]],
[[-2.0124e-02, 1.8716e-01, -1.9738e-03],
[-2.4751e-03, 1.9817e-01, 1.5683e-01],
[-1.3436e-01, -8.4722e-02, -3.6182e-02]],
[[-1.6563e-01, -4.9075e-02, 5.0118e-02],
[-9.2124e-03, 2.6627e-03, 7.5160e-03],
[-2.8494e-02, 1.1029e-01, 1.2558e-01]],
...,
[[-4.7059e-02, -3.0464e-02, -4.3831e-02],
[-3.6703e-01, -1.5602e-01, 1.3006e-01],
[-2.9960e-01, -1.5344e-02, 1.2958e-01]],
[[-6.0967e-02, 1.6290e-02, 1.0367e-01],
[ 8.8653e-03, -5.8371e-02, -3.0993e-01],
[ 1.1097e-01, -1.4040e-01, 1.8816e-01]],
[[-1.6545e-01, -2.0717e-01, 2.7669e-02],
[ 3.7892e-02, 4.9791e-02, 5.3299e-02],
[ 1.4928e-01, 1.1072e-01, 5.5605e-02]]],
[[[ 5.4524e-02, -9.5390e-02, -5.0911e-02],
[ 1.0085e-01, 1.9101e-01, -1.3691e-01],
[ 1.2643e-02, 1.2658e-01, -2.0188e-03]],
[[-5.9362e-02, -1.9918e-01, -1.9785e-01],
[ 1.5370e-01, 1.0108e-01, 7.5100e-02],
[-8.7504e-02, 1.8409e-01, 2.3408e-01]],
[[-1.4900e-01, -1.2836e-01, -4.8081e-02],
[-8.1300e-02, -1.9090e-02, -6.4670e-02],
[ 3.5844e-02, 1.9221e-01, 1.1515e-01]],
...,
[[-4.5114e-02, 7.5605e-02, 1.3785e-01],
[-1.1547e-01, -1.0434e-01, -4.7354e-02],
[-1.7075e-01, 1.5099e-02, 5.3319e-02]],
[[-8.9839e-02, -6.3424e-02, 1.6028e-01],
[-6.7472e-03, 8.3009e-02, 9.8307e-02],
[-1.2673e-01, -4.7127e-02, -8.9428e-02]],
[[-1.2228e-01, -3.5776e-01, -3.6247e-01],
[ 9.9195e-03, -1.2408e-01, -1.9666e-01],
[ 3.3241e-02, 5.5712e-03, -6.9554e-03]]],
[[[ 1.2606e-02, -4.3253e-02, -3.0759e-02],
[-5.6455e-02, -5.3362e-02, -3.3909e-02],
[-6.3749e-02, -9.4522e-02, -2.8476e-02]],
[[ 2.9495e-02, -1.5642e-02, -2.6843e-02],
[ 3.9246e-03, -3.8295e-02, 4.7917e-04],
[-1.0895e-02, -4.8179e-02, -2.7366e-02]],
[[-1.8656e-02, -6.1094e-02, 5.8312e-03],
[-1.1718e-02, 1.7821e-02, 1.9630e-02],
[-1.4814e-02, -4.8462e-02, -3.8358e-02]],
...,
[[ 8.5187e-03, -4.5097e-02, 4.2793e-02],
[-1.2681e-02, -2.9929e-03, -1.7258e-02],
[-4.2701e-02, -6.2026e-02, 2.2791e-02]],
[[-4.3783e-02, -2.5682e-02, -5.1346e-02],
[ 1.1885e-03, -4.2002e-02, -4.9799e-02],
[-9.9165e-03, -7.4974e-02, -2.7753e-02]],
[[-4.9907e-02, -3.2875e-02, -5.7121e-02],
[ 1.8327e-02, 1.9719e-02, -1.3105e-02],
[-9.5796e-03, 3.6103e-02, -3.2321e-04]]],
...,
[[[-5.0584e-02, -1.8759e-01, -1.1076e-01],
[-1.7705e-01, -1.1981e-01, 8.2166e-02],
[-1.9144e-01, -6.8156e-02, -5.5088e-02]],
[[-2.7664e-02, 2.0509e-01, 1.0002e-01],
[-2.9877e-02, -2.6423e-02, 6.4890e-02],
[-1.5998e-01, -1.4004e-01, -4.3287e-02]],
[[-5.7264e-03, -2.5176e-02, -2.0588e-01],
[-6.5251e-02, 2.0335e-02, 1.4135e-01],
[-7.9673e-02, 1.1513e-01, 1.9590e-01]],
...,
[[-4.2987e-03, 6.7998e-02, 1.4937e-02],
[ 1.5763e-02, -4.8142e-02, 9.7684e-02],
[ 1.0136e-01, 1.2161e-02, 3.7308e-02]],
[[-3.8533e-02, -1.3836e-01, 9.5685e-02],
[ 1.1316e-01, -2.5398e-02, -2.1957e-02],
[-4.9664e-02, -2.9493e-01, -3.3176e-01]],
[[ 9.8425e-02, 2.1290e-02, -8.6498e-03],
[ 4.0615e-03, -6.0078e-02, 2.0064e-01],
[-2.0799e-01, -1.5847e-01, -1.9746e-01]]],
[[[-1.7548e-01, -3.0290e-01, -1.2430e-01],
[ 2.2195e-01, -3.0895e-02, -1.2095e-01],
[ 1.9479e-03, 7.0882e-02, -6.4534e-02]],
[[ 1.0284e-01, -1.5795e-01, -1.2168e-01],
[ 1.3152e-01, -4.2775e-02, -3.4365e-01],
[ 2.4483e-01, 1.5405e-02, -1.0681e-01]],
[[ 4.3517e-02, -8.6327e-02, 1.4973e-01],
[ 4.1563e-02, 3.7682e-02, 8.2096e-02],
[ 8.6464e-02, 3.7084e-02, 1.4678e-01]],
...,
[[-1.1656e-01, -5.3237e-02, 5.0282e-02],
[-2.9980e-03, -6.8734e-02, 1.1990e-01],
[ 1.2004e-01, -6.9931e-02, 1.1128e-02]],
[[ 1.6513e-01, 9.1267e-02, 4.0306e-02],
[ 3.7819e-02, -3.2605e-02, 3.0293e-02],
[-5.4657e-02, 1.3328e-01, 3.6540e-02]],
[[-1.4603e-01, -2.9053e-01, -2.2720e-01],
[ 7.9119e-02, -1.5412e-01, -2.3768e-01],
[ 1.8439e-01, 3.7402e-02, -1.4669e-01]]],
[[[ 1.8414e-01, -4.0326e-03, -1.6662e-01],
[-3.7294e-02, -1.5323e-01, 1.9740e-02],
[-1.2726e-01, -1.2346e-01, -3.6577e-02]],
[[-9.1733e-02, 5.5523e-02, -3.1530e-02],
[-1.6182e-01, -9.7143e-02, 3.5352e-02],
[-2.6626e-01, -2.7137e-01, -1.9666e-01]],
[[ 8.6171e-02, 6.8106e-03, 8.8276e-02],
[-4.8043e-03, -1.3001e-01, -8.1523e-02],
[ 1.7687e-02, -1.6736e-01, -8.1290e-02]],
...,
[[-1.4769e-01, 4.4802e-02, -2.5987e-02],
[-7.5870e-03, 3.2068e-02, -2.6953e-02],
[ 1.1404e-02, 2.5643e-02, 1.3112e-01]],
[[-1.9102e-02, -2.7317e-02, -1.2375e-01],
[ 1.2535e-01, 3.1879e-02, -2.7362e-02],
[-6.6775e-02, -2.9088e-01, -1.6283e-01]],
[[ 1.0961e-01, -4.8790e-02, -1.2219e-01],
[-3.4744e-03, -1.7125e-01, -3.3404e-02],
[ 1.2024e-02, -1.4707e-01, -8.9655e-02]]]], device='cuda:0')),
('conv_layers.12.bias',
tensor([-0.0440, 0.0098, -0.0686, -0.0452, 0.1937, -0.0938, -0.0518, -0.0340,
0.1633, -0.0650, -0.2417, -0.0828, 0.1172, 0.0975, 0.2004, -0.0735,
0.0269, -0.0157, -0.0970, 0.0348, -0.0321, -0.0870, 0.0255, -0.0829,
0.0954, 0.1515, 0.0578, -0.0053, 0.1682, -0.0693, -0.0324, 0.1804],
device='cuda:0')),
('fc_layers.0.weight',
tensor([[-4.9319e-02, 3.4758e-03, -5.2684e-02, ..., -3.8735e-02,
-5.4818e-02, 1.2124e-02],
[-5.4787e-02, -2.7533e-02, -2.0990e-02, ..., -4.0816e-02,
-4.6761e-02, -2.8465e-02],
[-4.0150e-02, 8.5936e-04, -1.0309e-01, ..., -7.0499e-02,
-9.5101e-02, -4.9143e-02],
...,
[ 5.8440e-05, 4.2966e-02, 5.7693e-03, ..., 5.8275e-02,
-4.7894e-02, -3.9191e-02],
[-2.4364e-02, -4.5211e-03, 1.9238e-02, ..., -3.2138e-02,
-1.5131e-02, -1.8816e-02],
[-7.7690e-02, -2.2250e-03, 8.4725e-02, ..., -3.0941e-02,
3.6232e-02, 6.4043e-02]], device='cuda:0')),
('fc_layers.0.bias',
tensor([-6.4128e-02, 1.0881e-02, -7.8574e-02, -1.2811e-01, 4.6263e-02,
1.5897e-01, 5.6992e-02, 1.3649e-02, 1.0640e-01, 1.2061e-02,
-6.4502e-02, -6.4838e-02, -5.6275e-02, 2.3234e-01, 3.5786e-02,
-5.0140e-02, -9.0102e-02, -3.8450e-02, -5.4373e-02, -9.0967e-03,
-4.5822e-02, -3.4308e-02, -2.0472e-02, -5.6257e-02, -5.3586e-02,
-2.8521e-02, 9.1094e-02, -5.2511e-02, 1.0691e-02, -5.8200e-02,
-6.4856e-02, -2.4698e-02, -6.2445e-02, -2.6135e-04, -8.4684e-03,
6.3286e-02, 6.4375e-02, -7.8081e-02, 1.0131e-01, -6.8568e-02,
4.0177e-02, -3.9449e-02, -1.3123e-01, -5.0194e-02, -1.7292e-02,
-3.6593e-02, -3.3108e-02, -5.0110e-02, 1.3042e-01, -2.1822e-02,
1.6226e-03, 1.0117e-01, 4.9954e-02, 2.1096e-01, 5.7124e-02,
1.0227e-01, -2.1199e-02, -1.2851e-02, -1.1566e-02, 1.6168e-01,
-2.8576e-02, 3.6749e-03, -4.3911e-02, 2.0032e-01, 2.2231e-02,
-2.8436e-03, -8.4517e-02, 9.7206e-02, 1.5069e-01, 1.0732e-03,
-5.5819e-03, -2.0647e-02, 7.0257e-03, -1.3009e-01, -6.0443e-02,
3.4467e-02, -1.1129e-01, -2.3349e-02, -4.0632e-03, -2.7776e-02,
1.8195e-01, 2.6412e-02, -5.3373e-02, 1.8095e-01, 7.3706e-02,
1.9576e-02, 2.2572e-01, 2.5201e-02, 1.0237e-01, -4.6136e-02,
-1.1615e-01, -5.4846e-02, 3.9294e-01, -1.2536e-02, -5.2296e-02,
7.7626e-02, 2.4443e-01, 2.2190e-01, -6.8606e-02, 3.5705e-02,
-2.2074e-02, -3.9147e-02, 3.2050e-02, 1.0853e-01, 5.2902e-02,
-5.9557e-02, -5.8448e-02, 4.4363e-03, -8.1289e-02, -1.9989e-02,
1.2959e-03, 1.3056e-01, 7.2178e-02, -1.1410e-03, -7.0469e-02,
9.1288e-02, -1.0679e-01, -4.6028e-02, -2.2130e-02, -4.2936e-02,
1.0828e-02, 1.1295e-01, 3.2830e-01, 1.5188e-01, -2.1231e-02,
-2.0036e-02, -5.6487e-02, -4.3641e-02, 1.1438e-01, -3.0824e-02,
-7.3182e-02, 3.4454e-02, -3.2721e-02, -1.6793e-02, -4.6240e-02,
1.8310e-01, -4.2884e-02, -6.5078e-02, -6.8052e-02, -3.5445e-02,
-9.2535e-02, 7.7579e-02, -2.8456e-02, -5.1913e-02, 1.3828e-01,
-4.6614e-02, 3.8606e-02, -1.5625e-02, -1.1145e-02, 1.5836e-01,
8.9611e-04, 1.3964e-01, -4.5395e-02, 1.7598e-01, 5.4361e-02,
-4.0586e-02, 1.4207e-01, -2.0256e-02, -5.5789e-02, -6.1623e-02,
2.3218e-02, -7.7956e-02, 5.2671e-02, 2.8469e-02, -1.9208e-02,
-3.2379e-02, 5.2067e-02, -1.0461e-02, 5.3618e-02, 1.5311e-02,
1.2351e-01, -1.3067e-01, 2.5828e-02, -9.1386e-02, 1.3866e-02,
1.4277e-02, 2.3195e-01, 1.7288e-01, -7.4077e-02, 7.6781e-02,
2.2502e-02, 2.5193e-02, -4.7716e-02, -8.4707e-02, -4.6477e-02,
1.8112e-01, -4.3306e-02, 1.9309e-01, -8.6879e-02, 8.4588e-03,
-4.3503e-02, -1.9375e-02, -5.6262e-02, -8.3235e-02, -1.2867e-02,
3.6371e-02, -1.0853e-01, 3.4596e-03, 1.5404e-01, -7.3069e-02,
-3.9060e-02, 3.6475e-02, 1.5011e-02, 1.9380e-02, -6.6762e-02,
-2.0962e-02, 2.8877e-02, 1.4647e-02, -1.1697e-01, 2.4654e-02,
-2.6015e-02, -1.3499e-02, 1.8263e-01, -1.5983e-02, -1.8724e-02,
-1.5655e-02, -6.1335e-02, 1.0705e-01, -7.2062e-02, 8.5851e-02,
-7.1094e-02, -8.4573e-02, 1.0217e-01, -6.0859e-02, 3.6408e-02,
-6.2573e-02, -1.0453e-02, -1.1330e-02, 9.3073e-02, 1.1341e-02,
-1.6465e-01, -3.9062e-02, -1.7924e-02, -2.4294e-03, -3.4182e-02,
1.3131e-01, 4.1704e-03, -4.3734e-02, 1.7372e-02, -7.4191e-02,
-5.0581e-02, 4.7190e-02, 3.5199e-02, -1.3251e-02, 9.3482e-02,
5.2875e-02, -4.2718e-02, 4.8718e-02, -6.8461e-02, 8.8977e-03,
-4.3790e-02, -1.9370e-02, 8.3043e-03, 5.2214e-02, -2.7331e-02,
1.4526e-01], device='cuda:0')),
('fc_layers.2.weight',
tensor([[-0.0097, -0.0127, -0.0065, ..., -0.0087, 0.0059, 0.0690],
[ 0.0164, -0.0625, -0.0014, ..., 0.0798, 0.0030, 0.1606],
[-0.0099, -0.0768, -0.0010, ..., 0.0459, 0.0541, -0.0476],
...,
[-0.0409, 0.0272, 0.0256, ..., 0.0099, -0.0137, -0.0335],
[ 0.0056, 0.0429, -0.0326, ..., -0.0913, 0.0340, 0.0097],
[ 0.0234, -0.0038, 0.0071, ..., -0.0596, 0.0003, -0.0500]],
device='cuda:0')),
('fc_layers.2.bias',
tensor([-0.1096, 0.0765, 0.2246, 0.1280, 0.1321, 0.0038, -0.1251, -0.2667,
-0.1005, -0.2009], device='cuda:0'))])},
{'ratio': 0.25,
'bias': 64,
'train_losses': [285.5163003591135,
259.3057644350574,
232.17203868226022,
215.6464661567831,
206.6534275163947,
199.2504381775648,
193.85461227827255,
190.33946176432397,
186.92739944391434,
184.06704771081814,
181.48890317568188,
180.29082502224475,
178.68610383432366,
177.20696611366972,
176.03793437048193],
'test_losses': [274.54728781008254,
239.55820038739373,
218.459972521838,
208.3049776694354,
200.1826932290021,
193.1748154303607,
190.6243873016507,
184.414440351374,
181.18878558102776,
181.33488809361177,
177.6447575606552,
175.7397717962078,
173.22310095207365,
173.34422142832886,
170.57400638916914],
'model_state_dict': OrderedDict([('conv_layers.0.weight',
tensor([[[[ 0.2533, 0.0406, -0.2288],
[ 0.0635, 0.2151, -0.2311],
[ 0.0484, 0.1967, -0.1714]],
[[-0.0898, 0.1240, -0.2035],
[ 0.1463, -0.0548, -0.3027],
[ 0.0768, 0.1712, -0.2531]],
[[ 0.1327, -0.0568, 0.1129],
[-0.0953, 0.0843, -0.0508],
[ 0.2331, -0.1434, -0.0219]]],
[[[ 0.1783, -0.0644, -0.2789],
[ 0.0057, 0.1408, -0.0541],
[ 0.0653, 0.2251, -0.2021]],
[[ 0.0376, -0.0528, -0.0237],
[ 0.1256, 0.1889, -0.3264],
[ 0.2919, -0.1255, -0.1806]],
[[-0.1371, -0.0530, -0.0460],
[ 0.1664, -0.1237, -0.1215],
[ 0.2433, 0.1487, -0.0528]]],
[[[ 0.0739, 0.1844, 0.0993],
[-0.2766, -0.1686, 0.0396],
[-0.0497, 0.0712, 0.1292]],
[[-0.0054, -0.0341, 0.1198],
[-0.3038, -0.2373, 0.0620],
[ 0.0033, -0.0507, 0.1618]],
[[ 0.0689, -0.1142, 0.2388],
[ 0.0025, -0.0781, -0.0397],
[-0.1003, 0.1907, 0.0324]]],
[[[ 0.1069, 0.2392, -0.1779],
[ 0.1449, 0.2609, -0.0252],
[-0.2049, 0.0361, 0.0059]],
[[ 0.0835, 0.2061, -0.0977],
[ 0.1495, 0.2726, -0.0017],
[-0.1933, -0.0798, 0.2320]],
[[-0.2815, 0.0745, 0.0609],
[-0.2839, -0.2659, 0.0623],
[-0.0145, 0.1291, -0.1384]]],
[[[-0.2738, -0.1228, -0.0280],
[-0.0860, -0.1097, 0.1037],
[ 0.2634, 0.0936, 0.1804]],
[[-0.1421, 0.0123, -0.2875],
[ 0.2389, -0.0967, -0.1953],
[ 0.1658, 0.3016, 0.0605]],
[[-0.1101, -0.2241, -0.0934],
[ 0.0722, -0.1084, 0.0213],
[ 0.1944, 0.1729, -0.0121]]],
[[[-0.1859, -0.2186, -0.1266],
[ 0.0890, -0.1575, -0.0209],
[ 0.0918, 0.2224, 0.3034]],
[[-0.1079, -0.1091, 0.0698],
[-0.0652, -0.1543, 0.0281],
[-0.0629, 0.0366, 0.1516]],
[[-0.0648, -0.0944, -0.1397],
[-0.0483, -0.1099, 0.1722],
[ 0.2095, -0.0152, 0.2823]]],
[[[ 0.1085, 0.0160, -0.0637],
[ 0.1606, 0.1411, 0.0013],
[-0.0022, -0.1967, -0.1593]],
[[ 0.2814, 0.0796, 0.1091],
[-0.0614, 0.0203, -0.0280],
[-0.1406, -0.2214, -0.2833]],
[[ 0.1457, 0.0326, -0.1763],
[ 0.0986, 0.0993, 0.1599],
[ 0.0290, -0.1132, -0.0190]]],
[[[-0.0818, -0.1111, -0.2873],
[ 0.1885, -0.1422, 0.1091],
[-0.0460, 0.2034, 0.1620]],
[[-0.2078, -0.2444, -0.2168],
[ 0.0785, 0.1908, -0.2195],
[ 0.2190, 0.1479, 0.2164]],
[[-0.0839, 0.0436, -0.2110],
[ 0.0645, -0.0736, 0.0970],
[ 0.0090, 0.0970, 0.1236]]]], device='cuda:0')),
('conv_layers.0.bias',
tensor([-0.2892, 0.2230, 0.0793, -0.0356, -0.3554, 0.1851, 0.1376, 0.1928],
device='cuda:0')),
('conv_layers.2.weight',
tensor([[[[ 1.5094e-01, 1.9468e-01, 7.5380e-04],
[ 3.1963e-01, 8.3116e-02, 2.2222e-01],
[ 1.9121e-01, 2.5613e-01, 7.6342e-02]],
[[ 2.2602e-02, -1.7258e-01, -1.3733e-01],
[-8.5618e-02, -1.1985e-01, -2.0941e-01],
[-1.3987e-01, -2.9320e-01, -6.0671e-02]],
[[-1.2533e-04, 6.2679e-02, 2.0941e-02],
[ 1.7515e-01, 4.6075e-02, -3.6286e-02],
[-1.8740e-02, -3.9222e-03, -2.0909e-03]],
...,
[[ 9.1170e-02, 1.4328e-01, 2.0544e-02],
[-6.1220e-03, -1.6219e-03, -1.0877e-01],
[ 1.5996e-01, 2.5404e-02, -1.7607e-01]],
[[ 1.9768e-01, 1.7588e-01, 1.3777e-01],
[ 4.2163e-02, 1.0204e-01, -1.1996e-01],
[-4.3708e-03, -5.5974e-02, -1.3622e-01]],
[[-1.5515e-01, -1.0572e-01, -8.0109e-02],
[ 8.8985e-02, -9.9193e-02, -7.0264e-02],
[ 1.6365e-01, 9.6698e-02, -7.7630e-02]]],
[[[-1.5936e-01, -5.1349e-01, -4.4438e-01],
[-1.9750e-02, -5.7407e-01, -4.2682e-01],
[ 6.5333e-02, -2.6383e-01, 1.0463e-01]],
[[ 4.4722e-02, -9.5757e-02, -1.7825e-01],
[-1.2114e-01, -1.5551e-01, -1.0570e-01],
[-1.5112e-01, -1.1162e-01, -8.9066e-02]],
[[ 3.7978e-02, 1.5728e-02, 1.5866e-01],
[-3.0531e-02, 4.1422e-02, -9.9134e-02],
[ 4.5771e-03, -6.3470e-02, 5.5606e-03]],
...,
[[ 3.1875e-02, 5.1134e-02, 1.7540e-01],
[ 1.2263e-01, 1.5313e-01, -1.6464e-03],
[ 1.3405e-01, -4.7263e-02, -8.2574e-03]],
[[ 3.2569e-02, -7.8782e-02, -2.4343e-01],
[-2.9064e-02, -2.7236e-01, -1.9353e-01],
[ 3.1322e-02, 3.5840e-02, 2.4320e-02]],
[[ 8.4135e-02, 1.5901e-01, 1.2528e-01],
[-1.4292e-02, 9.0271e-03, 1.4496e-01],
[-1.8932e-02, 1.0415e-01, 1.2987e-01]]],
[[[-1.4746e-01, -5.7471e-02, -5.4547e-03],
[-1.3301e-01, -4.7255e-02, -1.2613e-02],
[-1.3015e-01, 2.7520e-02, -2.8585e-01]],
[[ 7.6024e-02, -1.1762e-01, -3.4653e-01],
[ 1.1840e-01, -8.1869e-02, -3.7135e-01],
[ 3.2538e-02, 2.2875e-02, -3.2394e-01]],
[[-7.6560e-02, 9.1736e-02, 1.2734e-01],
[ 3.5030e-03, 6.1709e-02, 2.4204e-01],
[-4.1666e-02, -1.5829e-01, -1.0027e-01]],
...,
[[ 6.1238e-03, 9.5369e-02, 6.2202e-02],
[ 1.1630e-01, 1.6893e-01, 1.1896e-01],
[-4.9683e-03, -1.0889e-01, 1.3045e-01]],
[[ 1.5822e-01, 4.2460e-02, -4.9971e-02],
[ 8.2857e-02, -1.4074e-01, 5.4017e-03],
[ 1.6483e-01, -2.8660e-02, 4.7883e-02]],
[[ 1.3498e-02, -2.3444e-01, -1.9638e-01],
[ 2.2067e-01, -1.2946e-01, -1.1075e-01],
[-1.5161e-02, -2.3064e-02, -2.2674e-01]]],
...,
[[[-7.2573e-02, -4.7093e-02, -1.5834e-01],
[-2.0173e-01, -2.9385e-02, 6.2145e-02],
[-1.5201e-01, -1.3530e-01, -6.7640e-02]],
[[-1.0336e-01, -6.9792e-02, 3.6656e-02],
[-1.4165e-01, -6.6396e-02, 3.0924e-02],
[-2.6205e-02, 1.3562e-01, 2.4397e-01]],
[[ 9.7805e-02, 1.9474e-01, 6.9329e-02],
[-1.0470e-01, -1.6503e-01, 3.7638e-02],
[ 3.9067e-02, -1.0489e-01, -5.6849e-02]],
...,
[[ 1.6698e-01, -1.6031e-02, 4.8593e-02],
[ 2.9916e-02, -4.1120e-03, -2.4000e-02],
[ 5.4132e-02, 5.4840e-02, -6.5153e-02]],
[[-1.5374e-01, -3.4793e-02, -1.7705e-01],
[-2.5481e-01, -2.2938e-01, -1.8723e-01],
[-2.5142e-02, -1.8019e-02, 1.0926e-01]],
[[ 1.4546e-01, 7.2512e-02, 9.4316e-02],
[ 3.6105e-02, 1.4621e-01, 6.3785e-02],
[ 1.1110e-01, 1.4140e-01, 1.3120e-05]]],
[[[ 2.7917e-02, -4.6308e-02, -3.9429e-02],
[-1.3019e-02, -5.3058e-03, -1.5834e-02],
[ 6.7656e-02, -1.7488e-01, -4.6800e-02]],
[[ 2.4485e-01, 5.0899e-02, 7.0051e-02],
[ 1.2058e-01, 2.9470e-02, 7.5311e-02],
[ 6.7617e-02, -1.8732e-02, -4.3856e-02]],
[[-1.6970e-01, -2.3944e-02, 1.2984e-01],
[-2.5764e-01, 1.1211e-02, 1.6341e-01],
[-2.1425e-01, -4.0145e-02, 1.3631e-01]],
...,
[[-1.0307e-01, -4.0941e-02, -2.2856e-02],
[-2.9180e-01, -9.4389e-02, 1.6313e-01],
[-5.8739e-02, 1.2447e-01, 1.7529e-01]],
[[-3.8916e-02, 1.8231e-01, -7.9478e-02],
[ 3.3207e-02, 1.4746e-01, -8.5822e-02],
[ 1.3676e-01, 9.8457e-03, -1.0475e-01]],
[[ 1.5666e-02, -4.2678e-02, -2.3277e-03],
[-4.0229e-02, 3.5703e-02, -7.8330e-02],
[ 1.6452e-02, -4.9438e-02, -1.0041e-02]]],
[[[-8.2038e-02, -7.1308e-02, 1.1418e-01],
[-5.3905e-02, -1.4262e-01, -1.8510e-01],
[ 5.2729e-02, 4.2223e-02, -9.6920e-02]],
[[-1.3145e-01, -2.9861e-01, -2.5665e-01],
[ 1.1855e-01, -1.6704e-01, -2.4246e-01],
[ 2.3309e-01, 3.1515e-03, -3.1966e-01]],
[[ 4.2963e-02, 7.8716e-02, 7.0347e-02],
[-4.5566e-02, 1.1999e-01, -1.2129e-02],
[ 6.6789e-02, -2.8065e-02, 9.7555e-02]],
...,
[[-1.5652e-02, 7.5908e-02, -1.1365e-01],
[ 5.5173e-02, -1.5968e-01, -6.4837e-02],
[ 3.1128e-02, -4.2259e-02, 1.6525e-01]],
[[-5.2299e-02, -3.7927e-02, 1.8574e-01],
[ 6.6532e-02, -2.2450e-02, 2.6438e-02],
[-6.3516e-02, -2.8269e-02, -1.1808e-01]],
[[-2.9327e-01, -4.3526e-01, -2.2043e-02],
[ 9.9118e-03, -2.0453e-01, -1.9614e-01],
[ 1.3176e-01, 3.3974e-02, -2.0674e-01]]]], device='cuda:0')),
('conv_layers.2.bias',
tensor([-0.5728, -0.0013, 0.1795, 0.3269, -0.1715, -0.0525, 0.0449, -0.1068,
-0.1353, 0.1010, 0.1171, 0.2272, -0.0872, -0.1037, -0.1429, 0.3252],
device='cuda:0')),
('conv_layers.5.weight',
tensor([[[[ 2.1728e-02, 5.2630e-02, 2.3942e-03],
[ 3.8761e-01, 2.7323e-01, 1.6949e-01],
[ 1.7911e-01, 1.8614e-01, 6.1702e-02]],
[[ 3.5245e-02, 3.9799e-02, -7.1483e-02],
[-4.6719e-03, 1.5468e-01, 9.4162e-02],
[-9.7308e-02, -7.0668e-02, -1.4673e-01]],
[[-8.7030e-02, -6.7694e-02, 3.1190e-02],
[ 1.9769e-02, -5.0702e-02, -4.1775e-02],
[ 9.6097e-06, -1.7619e-01, -2.2467e-01]],
...,
[[ 5.2333e-02, 1.2302e-01, 2.7765e-02],
[ 8.9532e-02, 7.8566e-02, 6.8501e-02],
[-5.1733e-03, -3.1442e-02, -4.2563e-03]],
[[ 7.0043e-02, -5.5693e-02, -1.5692e-01],
[ 2.3947e-02, -1.8145e-01, -3.1523e-02],
[-5.2771e-02, -7.6291e-02, 1.6706e-02]],
[[-3.9286e-03, 6.8743e-02, 1.2043e-01],
[ 6.4888e-02, -3.4236e-02, -1.6734e-01],
[-5.7385e-02, 5.4813e-02, -7.8884e-02]]],
[[[-7.5672e-02, -9.0795e-02, -2.2100e-01],
[-8.3276e-02, -2.1331e-01, -4.9283e-01],
[-2.7687e-01, -3.3981e-01, -6.0480e-01]],
[[-1.2064e-01, 2.7851e-02, 7.7794e-03],
[-1.4250e-01, -5.3815e-02, 6.8009e-02],
[-1.2587e-01, -5.0577e-03, 4.9952e-02]],
[[-1.2463e-02, 5.6968e-02, -8.3207e-02],
[ 4.0247e-02, 2.4968e-01, 5.8994e-02],
[-5.9190e-02, 1.5151e-01, 7.0508e-03]],
...,
[[-5.7357e-02, -2.1197e-02, -1.3098e-03],
[ 4.6665e-03, -2.9549e-01, -1.0103e-02],
[-6.8333e-02, -2.8511e-01, -5.0232e-02]],
[[ 2.6150e-02, -6.3789e-02, -1.2112e-02],
[ 3.2420e-02, 1.2151e-01, -1.0587e-01],
[-9.9781e-02, -8.9623e-04, -6.3924e-02]],
[[-9.2116e-03, 3.5436e-02, -2.4404e-02],
[-4.4814e-02, 1.7355e-01, 4.3271e-02],
[-3.9230e-02, 1.1525e-01, 1.8208e-01]]],
[[[-1.5820e-01, -7.1977e-02, -1.8396e-02],
[-1.2124e-01, -1.3939e-01, 1.0462e-01],
[-1.8008e-01, -2.5676e-02, 1.1663e-01]],
[[-1.6424e-02, -2.0127e-01, -2.8704e-01],
[-1.0429e-01, -1.6133e-01, -1.2613e-01],
[-7.0085e-02, -1.3132e-02, -2.4241e-02]],
[[ 2.1578e-01, -1.7668e-01, 8.6638e-02],
[ 3.3567e-01, -7.2064e-02, -7.4912e-02],
[ 1.3776e-01, 5.5203e-02, -1.1235e-01]],
...,
[[-8.2935e-02, -2.5879e-02, -1.3127e-01],
[-9.8151e-02, -2.3063e-02, -1.0501e-01],
[-1.0007e-01, -5.8114e-02, 3.2625e-02]],
[[-3.8438e-02, -1.9683e-01, -1.1223e-02],
[ 4.6900e-02, -5.3287e-02, 1.0722e-01],
[ 1.3250e-01, 4.1783e-03, -1.1160e-02]],
[[ 2.7420e-01, -6.7518e-03, 7.1972e-03],
[ 1.7095e-01, 1.5634e-01, -6.2666e-02],
[ 1.1510e-01, 8.5963e-02, -7.1538e-02]]],
...,
[[[-3.3232e-01, -4.3834e-01, -5.1568e-01],
[-3.9092e-01, -4.5954e-01, -4.7412e-01],
[-8.3066e-02, -5.1902e-01, -1.5401e-01]],
[[ 4.5592e-02, -6.7884e-02, -1.3589e-01],
[ 3.0226e-02, -1.3172e-01, -6.1548e-02],
[ 1.0007e-01, 7.5126e-02, -4.2426e-02]],
[[-2.6801e-01, -1.0627e-01, -1.5809e-01],
[-2.6248e-01, 7.5314e-02, 2.3398e-02],
[-3.3630e-01, -6.0620e-02, 1.4539e-01]],
...,
[[ 1.7500e-02, -2.1715e-01, -9.0147e-02],
[ 4.3664e-03, -1.4598e-01, -2.0399e-01],
[-3.1129e-02, -1.2289e-02, -1.2711e-01]],
[[-8.1276e-02, -6.2434e-02, -1.1598e-02],
[-3.1767e-02, 8.6637e-02, -5.8052e-02],
[ 6.5709e-02, 3.5437e-02, -1.3751e-03]],
[[-3.3534e-01, -2.0914e-01, -2.0858e-01],
[-4.0997e-01, 2.9943e-02, -3.0436e-02],
[-3.2229e-02, -8.5913e-02, 9.9581e-02]]],
[[[-1.0987e-01, 2.8947e-02, 2.1639e-02],
[-1.7189e-01, -1.1264e-01, 9.7130e-02],
[-3.5176e-01, -2.5217e-01, -1.0666e-01]],
[[ 1.1445e-01, 2.3344e-01, 1.4400e-01],
[ 5.4448e-02, 4.7000e-02, 3.8510e-02],
[ 3.4732e-02, -8.3324e-02, 1.9157e-01]],
[[ 5.2047e-02, -9.6040e-02, -1.3020e-01],
[ 7.0502e-02, -2.3876e-01, -1.2113e-01],
[ 5.1645e-02, -2.4714e-01, -1.8625e-01]],
...,
[[-1.9911e-02, 1.1973e-01, -3.6469e-02],
[ 2.3694e-02, 9.2096e-02, -9.4285e-02],
[ 3.6380e-02, -9.7056e-02, 6.9356e-04]],
[[ 2.4598e-01, -1.8929e-01, 9.6025e-02],
[ 4.1170e-04, -1.6619e-01, 7.1991e-02],
[-2.0749e-01, -2.0233e-01, 3.5437e-02]],
[[ 1.7257e-01, -2.1850e-01, -2.2096e-01],
[ 2.6451e-02, -2.4306e-01, -2.3709e-01],
[ 1.6072e-01, 2.5082e-02, 6.6832e-02]]],
[[[ 2.8867e-02, 4.7911e-02, -1.0941e-01],
[-1.9883e-02, -1.7949e-01, -5.4703e-02],
[-4.6234e-02, -1.2388e-01, -9.8021e-02]],
[[-3.8101e-03, -5.3589e-02, 7.2496e-02],
[-3.8205e-02, -1.8050e-01, -2.1222e-01],
[-1.7202e-01, -3.6188e-02, -8.8466e-02]],
[[-9.6553e-02, -1.0744e-01, -1.4225e-01],
[-6.0951e-02, -9.3238e-02, -1.0119e-01],
[ 2.1693e-02, 7.2374e-02, -1.0753e-01]],
...,
[[ 4.1884e-03, -8.2604e-04, -2.1379e-02],
[-1.4076e-01, -5.5005e-02, -6.7203e-02],
[-3.3888e-02, 7.9381e-03, -1.0976e-01]],
[[-1.2980e-01, -2.0756e-01, -1.8738e-01],
[-3.0658e-02, 4.2171e-02, -6.9841e-02],
[-4.6573e-02, -3.2403e-02, 8.4980e-03]],
[[ 5.7860e-03, 3.0538e-02, -3.3064e-02],
[-1.2329e-03, 6.1035e-02, -2.8797e-02],
[ 1.8567e-01, 3.4966e-02, -7.5786e-02]]]], device='cuda:0')),
('conv_layers.5.bias',
tensor([-0.0375, 0.0991, 0.0363, -0.0761, 0.0939, 0.0147, -0.0179, -0.1867,
-0.2225, -0.0022, 0.1563, 0.0523, -0.0371, 0.0212, 0.1927, 0.0876,
-0.1129, -0.0499, 0.0637, 0.0119, 0.0167, 0.2519, 0.1397, 0.1412,
-0.0167, -0.0492, -0.0062, 0.0596, 0.1127, 0.0121, 0.0137, 0.1658],
device='cuda:0')),
('conv_layers.7.weight',
tensor([[[[-2.6410e-01, -2.2833e-01, -2.6027e-01],
[-2.0674e-01, -2.3448e-01, -3.5770e-01],
[-1.1486e-01, -1.6024e-01, -1.0068e-01]],
[[-1.7206e-02, -3.1554e-01, 1.0887e-01],
[-5.1135e-02, -2.3574e-01, -5.9363e-02],
[-6.2212e-02, 4.9530e-02, -1.3754e-01]],
[[-2.8760e-01, -1.5249e-01, -2.2301e-01],
[-1.6359e-01, -3.7290e-03, -1.8305e-01],
[-1.3132e-02, 1.8234e-01, 1.0946e-01]],
...,
[[-1.9171e-01, -1.4600e-01, 2.0791e-01],
[-1.4856e-01, -3.3654e-02, 2.2327e-01],
[ 1.4168e-02, -9.5406e-02, -5.6246e-02]],
[[ 1.2260e-01, 1.5900e-01, 7.5252e-03],
[ 6.1457e-02, 2.3149e-02, -6.3167e-02],
[ 1.3454e-01, -7.7948e-03, -5.5733e-02]],
[[-4.8124e-02, -7.7241e-02, 1.0578e-02],
[ 2.7407e-02, -1.0988e-01, 8.1456e-02],
[-3.6138e-02, -7.6204e-02, 8.7036e-02]]],
[[[-1.5556e-01, -8.9318e-03, 6.0418e-03],
[-3.2289e-02, 1.7013e-02, 6.1508e-03],
[ 3.6880e-02, -1.0237e-01, 1.1284e-03]],
[[-1.5154e-02, -2.1520e-02, -2.4617e-02],
[-1.1717e-01, -1.0146e-03, 2.0507e-02],
[ 1.8889e-03, 6.5928e-02, -4.4277e-03]],
[[ 8.2647e-02, 1.1684e-02, -1.2002e-01],
[ 1.4327e-02, -7.3127e-02, 1.2312e-01],
[ 1.0495e-01, 1.0632e-01, 1.4319e-01]],
...,
[[ 7.4819e-02, 8.7836e-02, 8.1764e-02],
[ 4.3170e-03, 9.8873e-02, 6.7637e-02],
[-4.1735e-02, 5.6140e-02, -2.4025e-02]],
[[-4.6777e-02, -2.3290e-01, -7.3912e-02],
[ 8.1447e-02, -9.5589e-02, -1.7427e-01],
[ 1.1902e-01, -1.3003e-01, -2.5650e-01]],
[[-2.4138e-01, 3.1020e-02, -1.4127e-02],
[-2.2240e-02, -5.1911e-02, 6.9222e-03],
[ 1.0674e-01, 6.9149e-02, -6.0621e-04]]],
[[[-1.7868e-01, -2.1638e-02, -1.5563e-02],
[-5.0815e-02, -4.6226e-02, -5.7530e-02],
[-1.2266e-01, -8.2272e-02, -1.0395e-01]],
[[-9.7794e-02, -1.7026e-01, -2.7401e-02],
[-1.1259e-01, -1.9978e-01, -1.7611e-02],
[-4.4111e-02, -4.4471e-02, 8.3266e-02]],
[[-4.5178e-02, -6.5804e-02, -2.2861e-02],
[-1.5874e-01, -1.7804e-01, -1.2389e-01],
[-1.9988e-01, -7.9345e-02, -1.7176e-01]],
...,
[[-5.1730e-02, -3.8245e-02, -1.7323e-01],
[-9.1664e-02, -3.7677e-02, -1.2835e-01],
[-5.3004e-02, -4.0801e-02, -1.3423e-01]],
[[-5.9158e-02, -8.6432e-02, 9.0558e-03],
[-5.9596e-02, -3.7870e-02, -4.3703e-02],
[-5.7774e-03, -3.4823e-02, -5.4511e-03]],
[[ 2.3303e-02, -2.4107e-02, -9.4542e-03],
[-2.0126e-02, 3.9591e-02, -2.1544e-02],
[-4.6097e-02, -1.0040e-01, 6.9057e-02]]],
...,
[[[-2.5028e-02, -3.7586e-02, -8.9865e-02],
[-2.6090e-02, -4.4840e-02, -3.0712e-02],
[ 3.6134e-02, 8.9997e-02, -7.9438e-02]],
[[-5.5553e-02, -3.6308e-02, -7.5372e-02],
[-2.3004e-01, -2.6426e-01, -1.2584e-01],
[-3.0273e-01, -2.3836e-01, -1.1724e-02]],
[[ 5.5194e-02, 1.0354e-01, -4.4952e-02],
[ 3.0090e-02, -1.4287e-02, 7.0338e-02],
[-3.7946e-02, -4.5199e-02, -1.1103e-01]],
...,
[[ 1.5346e-02, 2.8321e-02, 4.3354e-02],
[ 3.1225e-02, 7.9333e-02, 1.0333e-01],
[-9.4822e-02, 1.8942e-02, -2.2471e-02]],
[[ 7.6819e-02, 4.0624e-02, -1.9201e-02],
[-1.5589e-01, -1.0797e-01, -1.2562e-02],
[ 5.7573e-03, -4.4795e-02, 3.9189e-02]],
[[-1.9310e-01, 6.1404e-02, -7.9610e-02],
[-4.1157e-02, -5.1700e-02, -3.7243e-02],
[-1.2409e-01, -3.3247e-02, 4.1459e-02]]],
[[[-2.2319e-02, -4.5655e-02, -6.9393e-02],
[ 6.2386e-03, -9.0274e-02, -6.0716e-02],
[-4.0100e-02, -5.2252e-02, -6.0356e-02]],
[[ 2.4850e-04, -1.2127e-01, -6.2331e-02],
[-7.7391e-03, -4.3015e-02, -6.9628e-02],
[ 9.2558e-03, -4.8260e-02, -4.7947e-02]],
[[-7.7392e-03, -5.5801e-03, -1.4703e-02],
[-2.1926e-02, 1.2977e-02, -2.1223e-02],
[-7.3018e-02, 1.9575e-02, -4.6790e-02]],
...,
[[-5.1031e-03, -6.2310e-02, -1.4109e-02],
[ 1.6538e-03, -7.6694e-02, -3.1059e-02],
[ 4.0605e-02, -2.0215e-02, 7.4315e-04]],
[[-5.4290e-02, 1.3139e-04, -3.0670e-02],
[-2.4301e-03, -5.7994e-02, -5.6627e-02],
[ 1.3833e-02, -6.5684e-02, -1.8572e-02]],
[[-9.0554e-03, 9.8174e-03, -6.5063e-02],
[-4.9720e-02, -3.6035e-02, -2.3389e-02],
[-2.3674e-02, -5.2845e-02, -5.2894e-02]]],
[[[-3.7117e-03, -6.6202e-02, -1.2704e-01],
[ 6.4257e-02, -6.8949e-02, -2.2595e-01],
[ 3.8658e-02, -8.9692e-02, -1.2413e-01]],
[[-1.8170e-02, -2.6621e-02, -1.9750e-01],
[ 8.7753e-02, 1.8665e-02, -1.5818e-01],
[ 3.7312e-02, -6.2993e-02, -1.6945e-01]],
[[ 1.2451e-01, -4.7311e-02, -1.2807e-02],
[ 7.1896e-02, 3.2212e-02, -1.4319e-01],
[ 2.4246e-01, -2.7764e-03, -2.5240e-01]],
...,
[[ 7.3104e-02, 7.4822e-02, 6.2290e-03],
[ 4.4447e-02, -9.8515e-03, -1.8269e-01],
[-4.6692e-02, 4.7886e-02, -2.2255e-01]],
[[ 2.4401e-02, -4.4694e-02, -2.1658e-01],
[ 8.5335e-03, -8.7980e-02, -1.8868e-01],
[ 4.7853e-02, -8.1057e-02, -1.5888e-01]],
[[-5.5861e-02, -3.5101e-02, -1.1471e-01],
[ 7.6755e-02, -8.7086e-02, -5.5914e-03],
[-1.1343e-02, -8.8747e-02, 5.9771e-02]]]], device='cuda:0')),
('conv_layers.7.bias',
tensor([-0.0439, -0.0719, -0.0799, 0.0741, 0.1076, 0.0389, 0.0912, 0.0217,
-0.0394, 0.1620, 0.0327, -0.0110, -0.0641, -0.1086, -0.0543, -0.0842,
-0.1325, -0.0941, -0.0113, 0.0333, 0.1669, -0.0447, 0.0835, -0.0004,
-0.0923, 0.0666, -0.1266, 0.0386, -0.0826, 0.0096, -0.0752, -0.0437],
device='cuda:0')),
('conv_layers.10.weight',
tensor([[[[ 8.6426e-02, 2.1797e-02, -3.9199e-02],
[ 1.3355e-01, 8.5501e-02, 3.7616e-02],
[ 7.6844e-02, 1.4863e-01, 1.7123e-01]],
[[ 7.1456e-02, -3.2666e-02, -1.1581e-01],
[ 2.6786e-02, -2.8813e-02, -1.0588e-01],
[-4.1158e-02, -1.0356e-01, -9.5379e-02]],
[[-1.0686e-01, 3.0028e-02, 8.4079e-02],
[-4.6329e-02, -6.3910e-02, -1.1090e-01],
[-1.1279e-02, -4.7655e-02, -2.7544e-02]],
...,
[[ 1.0120e-02, -1.7080e-01, -3.0620e-01],
[ 6.8019e-02, -3.6697e-02, -1.7948e-01],
[ 2.3565e-01, 1.4146e-01, 6.0344e-03]],
[[-3.1339e-02, 5.2257e-02, -9.5134e-03],
[ 5.1903e-02, -5.3630e-02, -1.7661e-02],
[-2.1612e-02, 2.2730e-02, -2.2402e-02]],
[[ 1.0649e-01, -5.2081e-02, -3.2283e-01],
[ 1.6332e-01, -3.0291e-02, -2.5311e-01],
[ 7.3995e-02, -1.5796e-01, -2.6852e-01]]],
[[[ 1.5438e-01, 2.5297e-02, -1.8355e-01],
[ 1.8914e-01, -5.2385e-03, -9.6660e-02],
[ 2.4609e-02, -5.4415e-02, -7.0329e-02]],
[[-6.8391e-02, 1.7663e-02, 1.5850e-03],
[-1.1501e-01, -3.7676e-02, 4.6195e-02],
[-1.2336e-01, 1.3644e-02, 6.0781e-02]],
[[-5.1606e-02, -3.1204e-02, 2.8009e-02],
[-3.0995e-02, -1.2368e-01, -6.7305e-02],
[-1.0599e-01, -3.2039e-02, 3.4966e-02]],
...,
[[ 1.2025e-01, 1.2962e-01, -3.1888e-02],
[ 1.3747e-02, 1.1606e-01, -8.8742e-02],
[-8.2395e-02, 5.7174e-02, 2.4805e-02]],
[[ 9.3494e-02, 2.4859e-02, -2.3073e-02],
[ 1.9609e-02, -2.5809e-02, 1.5127e-03],
[ 1.4194e-02, 1.2077e-02, -2.0555e-02]],
[[ 1.0312e-01, 6.5444e-02, -2.8092e-03],
[ 5.8856e-02, 1.2431e-01, -6.2639e-02],
[-3.5463e-02, -7.2379e-02, -8.9611e-02]]],
[[[ 1.1961e-02, 1.1197e-01, -1.7356e-01],
[-1.2116e-01, 1.0914e-01, -1.2409e-01],
[-6.7986e-02, 1.0896e-01, -1.0881e-01]],
[[ 5.2806e-03, 4.1198e-02, -1.5793e-01],
[-3.1991e-02, 9.4710e-02, 5.2005e-03],
[-1.0076e-01, -2.8849e-02, 3.0433e-02]],
[[-8.5886e-02, -1.3900e-01, 4.5624e-02],
[-7.7788e-02, -8.4724e-02, -2.7699e-02],
[ 2.9873e-02, -8.1353e-02, 1.4104e-02]],
...,
[[ 2.1390e-01, 1.3182e-02, -1.1229e-01],
[ 6.0955e-02, 5.9848e-02, -6.7431e-02],
[-5.5567e-02, 2.0765e-02, 7.8939e-02]],
[[-3.2753e-02, 3.7514e-02, -1.3289e-02],
[ 3.3121e-02, 9.6842e-02, 4.9666e-02],
[ 2.2869e-02, 9.8315e-03, 3.2361e-02]],
[[ 8.3017e-02, 1.3133e-02, -1.9748e-01],
[ 1.7594e-01, 1.1178e-01, -8.6473e-02],
[-1.1852e-01, -3.8253e-02, -1.5474e-01]]],
...,
[[[-9.3220e-02, 1.5268e-02, -3.3467e-02],
[-8.9890e-02, -1.1772e-01, -3.9349e-02],
[-7.5822e-02, -6.3970e-02, -8.2249e-02]],
[[ 9.2735e-03, 1.3513e-01, 2.0563e-01],
[ 3.1339e-02, 2.5865e-02, 9.2250e-02],
[ 1.5013e-01, 3.0897e-02, 1.1052e-01]],
[[-3.1499e-02, 6.5623e-02, -7.2418e-02],
[-7.9470e-05, -3.7257e-02, 9.9745e-02],
[ 2.1616e-02, -1.5304e-01, -1.1579e-02]],
...,
[[-7.8132e-02, 3.4339e-02, 3.8222e-02],
[-4.5024e-02, -4.6725e-02, -2.1489e-01],
[-2.2900e-01, -1.5099e-01, -1.3902e-01]],
[[-6.7490e-02, -3.6790e-02, -4.6022e-02],
[ 5.3388e-02, 7.5724e-02, -3.2451e-02],
[-2.7376e-02, 2.0719e-03, 1.2153e-02]],
[[-1.1548e-01, -3.1262e-02, -1.0043e-01],
[ 1.8285e-02, 2.1833e-02, -1.7257e-02],
[-8.2101e-02, -2.9608e-02, -7.0660e-02]]],
[[[-2.4462e-04, 1.4978e-01, -9.0889e-02],
[ 1.1613e-02, -2.5655e-02, -6.6419e-02],
[-3.1288e-01, -8.1445e-02, 7.5363e-02]],
[[ 7.9221e-02, 1.2951e-01, -3.9869e-03],
[-3.9922e-02, 6.1691e-03, -2.4889e-02],
[-1.6941e-01, -8.3170e-02, -2.3148e-02]],
[[-3.9793e-02, 3.6401e-02, -1.0210e-01],
[ 7.4450e-02, 7.2796e-02, -1.0820e-01],
[ 2.0992e-02, 1.0334e-01, -5.5245e-02]],
...,
[[ 8.5737e-02, -3.0562e-02, -6.5253e-02],
[ 1.9195e-01, 6.9118e-02, -4.7662e-02],
[ 2.2601e-02, -2.3439e-03, -8.6420e-02]],
[[ 4.4231e-02, -2.5281e-02, 1.8323e-02],
[ 5.1422e-02, 1.6895e-02, 1.0371e-02],
[ 3.9959e-04, 7.4141e-02, 5.3880e-02]],
[[ 1.5795e-01, -1.3730e-01, -1.9271e-01],
[ 1.7841e-01, -8.7092e-02, -8.5473e-02],
[-1.2620e-01, -5.0230e-02, -1.9118e-01]]],
[[[ 4.5950e-02, -1.6166e-02, 6.2711e-03],
[-6.6334e-02, 8.4025e-02, 1.5920e-01],
[ 3.6232e-02, 1.1750e-01, 1.0273e-01]],
[[-7.4123e-02, 2.6865e-02, 3.1123e-02],
[-1.7145e-02, 9.8270e-03, 7.8101e-02],
[-2.1271e-02, -2.2083e-01, -1.2811e-01]],
[[-6.3518e-02, -1.2057e-01, -1.4001e-03],
[-1.2146e-02, -4.1274e-02, 3.9880e-02],
[ 1.6716e-02, -1.0003e-01, 8.4397e-03]],
...,
[[ 5.1129e-02, 2.3105e-01, 1.9321e-01],
[-1.4539e-01, -1.0817e-01, -9.2950e-02],
[-1.1532e-01, -1.4824e-01, -1.2219e-01]],
[[-2.4048e-02, 2.0254e-02, -2.0879e-02],
[ 1.4457e-02, 2.7631e-02, 3.3039e-03],
[-1.7891e-02, -2.5754e-02, -3.4248e-02]],
[[ 7.6289e-02, 1.2048e-01, -2.0779e-02],
[-1.5452e-01, -5.8023e-02, -1.9509e-01],
[-7.0120e-02, -1.9292e-01, -2.0820e-01]]]], device='cuda:0')),
('conv_layers.10.bias',
tensor([ 0.1502, 0.1128, 0.0122, -0.0305, 0.3143, 0.2218, 0.1280, 0.0417,
-0.0772, 0.1388, -0.0752, -0.2769, 0.0272, 0.2006, -0.0329, 0.2665,
-0.1000, 0.0155, -0.1052, 0.1499, 0.0040, -0.0354, 0.0727, -0.0807,
-0.0214, -0.1454, -0.0101, 0.3247, -0.0621, 0.0224, -0.1306, -0.0787],
device='cuda:0')),
('conv_layers.12.weight',
tensor([[[[-1.3264e-01, -2.9151e-01, 1.0229e-01],
[-1.1679e-01, -3.0205e-01, 1.6915e-01],
[ 6.8524e-02, -1.2599e-01, 3.8157e-01]],
[[ 8.4378e-02, -1.5948e-02, -7.7643e-02],
[-1.0202e-01, 2.4697e-03, 1.9467e-02],
[-6.0757e-02, -1.6555e-01, 1.0704e-01]],
[[-7.7927e-03, -1.5828e-01, -1.3205e-01],
[-3.2009e-02, -1.0647e-01, -1.3314e-01],
[ 2.1935e-01, -3.7758e-02, -1.1615e-01]],
...,
[[-7.5167e-02, -1.5619e-01, -1.1565e-02],
[ 1.2892e-01, -1.9389e-01, -1.9683e-01],
[ 1.7564e-01, -1.2386e-01, -2.1833e-01]],
[[-7.6117e-02, 1.4472e-01, 8.7908e-02],
[-1.0160e-01, -1.1475e-02, 1.3618e-01],
[-1.6139e-01, 9.5722e-02, -2.0339e-02]],
[[-9.7192e-02, 9.1486e-02, 5.3639e-02],
[ 4.1155e-02, -2.9695e-03, 1.9878e-01],
[ 5.0457e-02, 1.2672e-01, 7.0004e-02]]],
[[[-9.8680e-02, 4.9636e-02, 3.3305e-02],
[-5.0950e-02, -5.0230e-02, -1.1498e-01],
[-1.0477e-02, -9.7717e-02, 1.1842e-02]],
[[ 7.4132e-03, 3.0588e-02, -1.8854e-02],
[ 4.2896e-02, 5.8263e-02, -5.1249e-02],
[-1.2457e-01, -1.0597e-01, -1.9869e-01]],
[[-2.2128e-01, 3.7040e-02, -8.7045e-03],
[-6.4037e-02, -2.2240e-01, -1.8471e-01],
[-1.1147e-01, -1.0835e-01, -8.1196e-02]],
...,
[[-7.8580e-03, -8.7576e-02, -1.4467e-01],
[ 5.6544e-03, -1.5112e-01, -6.5250e-02],
[-7.2247e-02, -6.9145e-02, 7.2510e-02]],
[[-8.8057e-02, -4.3304e-02, 6.6304e-02],
[ 1.0771e-01, -1.6217e-01, -8.6107e-02],
[ 2.1845e-02, -1.3402e-01, -2.1727e-01]],
[[-8.6672e-02, -5.6098e-02, 5.9651e-02],
[-2.9663e-02, 5.0192e-02, -1.4170e-02],
[-9.5199e-02, -2.8130e-02, -5.5076e-02]]],
[[[ 9.4760e-02, -1.0316e-01, 1.2684e-03],
[-3.1383e-02, -1.0084e-01, 7.4339e-02],
[-1.7629e-01, -2.8508e-01, 3.1656e-02]],
[[-4.4612e-02, -5.8071e-02, -8.2660e-02],
[ 1.0553e-01, -7.5769e-02, -3.3597e-01],
[ 1.2979e-02, -1.8597e-01, -1.1305e-01]],
[[-3.7054e-02, -1.0308e-01, 1.2034e-01],
[ 1.6592e-01, -3.3729e-02, -1.2318e-01],
[ 1.2960e-01, 3.1693e-02, -1.6466e-02]],
...,
[[-4.7770e-02, 4.0051e-02, 4.1070e-02],
[ 1.5374e-01, -3.7037e-01, -3.0887e-01],
[-6.1389e-04, -3.1979e-01, -2.9440e-01]],
[[-4.7312e-01, -3.3457e-01, -1.4290e-02],
[-2.1771e-01, -1.3146e-01, -1.3233e-01],
[ 1.5228e-03, -1.2346e-01, -7.1570e-02]],
[[-1.4020e-01, 6.5083e-02, 1.3440e-01],
[ 6.5302e-02, 1.6462e-01, 2.2346e-01],
[-7.1069e-03, 1.0338e-01, 7.1218e-02]]],
...,
[[[ 4.3710e-02, -1.4300e-01, -4.2765e-02],
[-3.5533e-02, -1.2861e-01, -1.5668e-01],
[ 1.0768e-01, 8.9684e-03, -1.6937e-01]],
[[-3.2378e-02, -2.3527e-01, -1.5568e-01],
[ 6.0768e-03, -7.1941e-03, 5.2117e-02],
[ 1.2137e-01, 9.8384e-02, -5.9639e-02]],
[[-2.7300e-02, 4.9506e-02, 6.6014e-02],
[-1.1341e-01, -9.1640e-02, -1.3001e-01],
[-2.2332e-02, -8.3925e-02, 4.2310e-02]],
...,
[[ 1.7496e-02, -3.0798e-01, -3.9919e-01],
[-5.1250e-02, -2.8322e-01, -9.3512e-03],
[ 4.2149e-02, -6.2928e-02, 5.6356e-02]],
[[-2.6399e-01, -2.7458e-01, 8.2910e-02],
[ 8.3809e-02, -7.3384e-02, -5.3613e-02],
[ 2.6867e-01, 8.4356e-02, -4.9719e-02]],
[[-3.6000e-02, 3.2637e-02, 1.4108e-02],
[-9.1796e-02, 3.9141e-02, -1.1400e-01],
[ 8.4020e-02, 5.9617e-02, -1.8983e-01]]],
[[[ 3.0571e-02, -2.2338e-02, -5.3949e-03],
[-5.7011e-02, 2.2507e-04, -6.1876e-02],
[-3.3216e-02, 6.3309e-03, -3.2026e-02]],
[[-6.4567e-02, -5.9177e-02, 4.2739e-02],
[-4.5692e-02, -9.1898e-02, -1.6842e-02],
[-5.2788e-02, -9.3047e-02, 1.4900e-02]],
[[-6.3648e-02, -3.2635e-02, -3.9760e-02],
[-1.9710e-02, -1.6121e-02, -8.7604e-02],
[-1.0531e-01, -7.2764e-02, -3.3907e-02]],
...,
[[-4.5832e-02, -6.0857e-02, -4.9318e-02],
[-2.6859e-02, 4.2082e-02, -5.7731e-02],
[-4.6709e-02, 5.6805e-02, 9.8472e-03]],
[[ 2.8521e-02, -3.3160e-02, 2.3824e-02],
[-5.3116e-02, -3.9798e-02, 6.3144e-02],
[ 3.7256e-02, 2.8475e-02, 1.9141e-03]],
[[ 4.0231e-02, -7.1254e-02, -9.8294e-02],
[-6.8861e-02, -3.2943e-02, 1.8298e-02],
[-2.0842e-02, -1.6487e-02, -5.5843e-02]]],
[[[-1.1198e-01, -7.6479e-02, 8.2727e-02],
[ 1.1904e-02, 1.9846e-02, -4.8563e-02],
[-1.1442e-01, 1.1556e-02, -1.2914e-01]],
[[ 1.0969e-01, 3.5799e-02, -3.0975e-01],
[ 2.3147e-01, 4.0035e-02, -2.1095e-01],
[ 4.7853e-02, -1.0139e-01, -2.2047e-01]],
[[ 4.3894e-02, -6.6177e-03, -1.6610e-01],
[-8.8105e-02, -7.5799e-03, 2.3434e-02],
[-1.7640e-01, -2.1639e-01, 3.6474e-03]],
...,
[[ 7.2049e-03, -1.9132e-02, -1.8749e-01],
[ 1.0029e-01, -6.2149e-02, -2.1034e-01],
[-5.2656e-03, -2.0517e-01, -4.6498e-02]],
[[ 1.0962e-01, -2.3240e-02, -2.6605e-01],
[ 4.9110e-02, -1.2984e-01, -3.3699e-01],
[-2.0775e-02, -1.2751e-01, -5.6817e-02]],
[[ 3.5810e-02, -1.3843e-01, -1.4034e-01],
[-4.2767e-02, -1.1700e-01, -6.0140e-02],
[ 1.3320e-02, -5.8480e-02, 5.1379e-03]]]], device='cuda:0')),
('conv_layers.12.bias',
tensor([-0.0821, -0.0065, 0.2582, -0.0246, 0.1762, 0.0565, 0.2126, 0.1591,
-0.1606, 0.0420, 0.1157, 0.0448, -0.0329, 0.0493, 0.2167, -0.1600,
-0.1269, -0.0877, 0.0935, -0.0316, 0.2208, -0.0433, 0.0197, 0.0911,
-0.0007, -0.0014, 0.1137, 0.0278, 0.1320, -0.0244, -0.0820, -0.0522],
device='cuda:0')),
('fc_layers.0.weight',
tensor([[ 0.1739, 0.3091, 0.4058, ..., 0.1530, 0.0838, 0.0533],
[-0.0370, 0.0258, 0.2261, ..., 0.0966, 0.0134, 0.0040],
[-0.0502, -0.0594, -0.0134, ..., 0.0043, 0.0117, -0.0744],
...,
[-0.0237, -0.0521, -0.0024, ..., -0.0177, 0.0152, -0.0540],
[-0.0295, -0.0400, 0.0028, ..., -0.0096, -0.1130, -0.0282],
[-0.0213, 0.1385, 0.0036, ..., -0.1495, 0.1176, 0.1276]],
device='cuda:0')),
('fc_layers.0.bias',
tensor([-0.1024, -0.0462, 0.0068, -0.0396, -0.0425, -0.0667, -0.0197, 0.0134,
-0.0786, 0.2261, -0.0653, -0.0451, 0.1422, 0.0371, -0.0205, 0.1547,
-0.0770, 0.1879, 0.0143, -0.0985, 0.0206, -0.0226, -0.0105, 0.0461,
-0.0215, -0.0295, -0.0438, 0.0233, 0.0651, 0.1079, -0.0866, 0.0483,
0.1399, 0.1512, 0.2198, -0.0516, -0.0540, -0.0214, 0.2507, -0.0308,
0.0182, -0.0257, 0.0194, 0.0865, -0.0791, -0.0497, 0.0291, 0.1107,
0.3255, 0.0398, -0.0404, -0.0199, 0.0558, 0.0058, -0.1340, -0.0813,
0.0438, -0.0340, 0.0854, -0.0246, 0.0009, 0.1311, -0.0360, -0.0628,
-0.0312, -0.0632, -0.0549, 0.1108, -0.0068, 0.0727, -0.1471, -0.0272,
0.0132, -0.0508, -0.1707, -0.0015, -0.0693, 0.1137, 0.0385, -0.0765,
0.0367, -0.0317, 0.0528, -0.0515, 0.0282, -0.1666, 0.0091, -0.0746,
-0.0246, 0.0242, -0.0333, -0.0159, -0.0051, -0.0725, -0.0913, -0.0550,
0.0317, -0.0222, -0.0492, -0.0322, -0.0145, 0.0889, 0.0178, -0.0769,
-0.1303, -0.0179, -0.0039, 0.0031, -0.0518, -0.0446, -0.0228, 0.1509,
-0.0170, -0.0727, -0.0589, -0.0181, -0.0185, -0.0722, -0.0007, 0.1383,
-0.0049, 0.1894, 0.0089, 0.1209, 0.0396, 0.0065, -0.0639, -0.0084,
-0.0671, 0.0588, -0.0926, -0.1216, 0.0296, 0.1173, -0.1127, -0.0554,
-0.0036, -0.0512, -0.1087, -0.0638, -0.0597, -0.0231, 0.0838, -0.0598,
-0.0879, -0.0735, -0.0503, 0.0300, -0.0400, 0.0362, -0.0312, -0.0697,
-0.0396, 0.1075, -0.0226, 0.0656, -0.0611, 0.1257, -0.1279, -0.0399,
-0.0530, -0.0010, -0.0123, 0.0333, 0.0494, -0.0691, -0.0019, -0.0832,
0.0343, -0.0412, -0.0572, -0.0059, -0.0488, 0.0466, -0.0657, -0.0623,
0.0795, 0.0024, 0.0275, -0.0768, -0.0344, -0.0931, -0.0637, -0.0570,
0.0283, -0.0517, 0.3382, -0.0874, -0.0053, -0.0747, -0.0421, -0.0335,
0.0028, -0.0045, -0.0575, -0.0359, 0.1472, -0.0384, 0.1093, 0.1322,
-0.0198, -0.0167, -0.0084, 0.0124, -0.0549, -0.0277, 0.0644, 0.1447,
0.0590, -0.0433, 0.0220, 0.0166, 0.0598, 0.2059, 0.0155, 0.1295,
-0.0552, 0.0128, 0.0741, 0.0874, -0.0526, -0.0586, -0.0477, -0.0083,
0.0570, -0.0687, 0.0869, 0.0172, -0.0295, 0.0571, -0.0246, -0.0591,
-0.0360, 0.0125, -0.0036, -0.0474, 0.0187, -0.0433, -0.0687, -0.0194,
-0.0594, -0.0137, 0.0687, 0.0434, 0.0107, 0.0345, -0.0284, 0.0717,
-0.0321, -0.0004, -0.0395, -0.0331, -0.0486, -0.0280, -0.0303, -0.0320],
device='cuda:0')),
('fc_layers.2.weight',
tensor([[-2.6997e-02, -4.0435e-02, -4.6838e-02, ..., -6.1151e-02,
-9.2842e-02, 7.3749e-02],
[-3.9377e-02, -3.7186e-02, 5.7937e-03, ..., 1.6489e-02,
-1.5199e-02, -1.1204e-01],
[-1.7330e-01, 7.0227e-03, -4.6528e-03, ..., -2.4240e-03,
4.5879e-02, -8.9017e-02],
...,
[ 2.1434e-01, 1.2387e-01, 2.2002e-03, ..., 9.9940e-03,
2.5583e-02, -1.0719e-01],
[-1.3836e-01, 2.1901e-02, -2.5805e-02, ..., -4.1977e-02,
-2.1527e-02, 1.3057e-01],
[-1.7487e-02, -5.4257e-02, -4.8628e-02, ..., -3.7980e-03,
-2.3049e-05, 5.5816e-02]], device='cuda:0')),
('fc_layers.2.bias',
tensor([-0.0410, 0.1498, 0.1057, 0.0991, 0.0914, -0.1236, -0.0936, -0.2316,
-0.0683, -0.1781], device='cuda:0'))])},
{'ratio': 0.25,
'bias': 128,
'train_losses': [281.56238065673,
256.8003983626607,
232.25528646846925,
213.95365970088966,
204.57522545871933,
198.33979355706387,
194.2361516534346,
191.36543038149364,
190.11627454012893,
187.41965167372638,
185.62255008557705,
183.99969166548465,
182.8408303533341,
179.76947096421873,
179.33376206923114],
'test_losses': [263.2846017164343,
242.94928404864143,
219.0203561969832,
202.6295892304065,
198.38487152959786,
193.02391494489183,
187.56543840146531,
186.37325460770552,
185.2710060044831,
184.73685585283766,
181.5177653911067,
178.72866377176024,
176.73611276757484,
174.82875047010535,
175.2575280526105],
'model_state_dict': OrderedDict([('conv_layers.0.weight',
tensor([[[[ 0.1675, -0.0598, 0.0269],
[-0.1683, -0.2501, -0.1849],
[-0.0050, -0.0344, 0.0618]],
[[ 0.1673, 0.1530, 0.2567],
[ 0.0444, -0.0009, 0.1203],
[-0.0507, -0.0090, -0.1184]],
[[ 0.0605, -0.0108, 0.1397],
[-0.0284, -0.1402, -0.1555],
[ 0.1473, -0.0378, -0.1384]]],
[[[-0.1972, 0.0158, -0.0017],
[-0.0390, -0.0618, -0.2247],
[ 0.1412, 0.2595, 0.0875]],
[[-0.1527, -0.2186, -0.2271],
[-0.0540, 0.0476, -0.1135],
[ 0.3295, 0.0249, 0.1850]],
[[ 0.1112, -0.1638, 0.0581],
[ 0.0923, -0.1001, -0.1120],
[ 0.0769, 0.1711, 0.0544]]],
[[[ 0.1789, 0.1255, 0.0847],
[ 0.0460, 0.0707, -0.0470],
[-0.2738, -0.0769, -0.1213]],
[[ 0.2452, 0.0935, 0.2593],
[ 0.0057, 0.0306, -0.0557],
[-0.2708, -0.3502, 0.0133]],
[[ 0.0419, 0.1929, -0.0273],
[ 0.0502, -0.1683, -0.0062],
[ 0.0749, -0.1167, 0.0108]]],
[[[ 0.1304, 0.2302, 0.1622],
[-0.0899, -0.0501, 0.0590],
[-0.0125, -0.1916, 0.0911]],
[[ 0.1171, -0.1424, -0.2678],
[-0.0696, 0.1265, -0.0370],
[-0.0126, -0.0663, -0.1160]],
[[ 0.0925, 0.2573, -0.1514],
[ 0.0445, 0.0341, 0.0198],
[-0.0755, 0.0189, -0.0913]]],
[[[ 0.2550, 0.0565, -0.1182],
[ 0.2203, -0.0995, -0.1563],
[ 0.0949, -0.0838, -0.1835]],
[[ 0.2114, 0.1747, -0.1538],
[ 0.1017, 0.0534, -0.1608],
[ 0.2102, -0.1534, -0.2987]],
[[ 0.1120, -0.0655, -0.0614],
[-0.0844, 0.1969, 0.0400],
[-0.0091, -0.0711, -0.0113]]],
[[[-0.0989, -0.0525, -0.2257],
[ 0.2264, 0.0323, -0.1453],
[ 0.1664, 0.2250, -0.1603]],
[[ 0.1308, 0.0875, -0.2775],
[-0.0553, -0.0331, 0.0327],
[ 0.2020, 0.0687, 0.1699]],
[[-0.2078, -0.1361, -0.0582],
[ 0.1252, -0.0178, 0.0274],
[ 0.0949, -0.0714, 0.0057]]],
[[[-0.0880, 0.0059, -0.0506],
[ 0.0619, 0.0443, 0.1104],
[ 0.2098, -0.0542, -0.1189]],
[[-0.1861, 0.0619, -0.0168],
[ 0.1781, 0.0282, 0.1574],
[-0.0380, 0.0928, -0.0850]],
[[ 0.0523, -0.3958, -0.0382],
[-0.1886, -0.1487, 0.0285],
[ 0.1961, -0.1207, 0.2188]]],
[[[-0.0761, 0.0163, 0.0531],
[-0.2276, -0.0036, 0.1699],
[-0.1055, -0.1076, 0.2597]],
[[-0.0603, -0.1327, 0.2517],
[ 0.0024, -0.1890, 0.1241],
[-0.1472, 0.0458, 0.0887]],
[[-0.1206, -0.2045, 0.0753],
[-0.0979, 0.1196, 0.1818],
[ 0.0209, -0.1460, 0.1761]]]], device='cuda:0')),
('conv_layers.0.bias',
tensor([-0.3138, 0.1593, 0.1477, -0.0860, 0.1560, 0.1586, -0.1894, 0.1045],
device='cuda:0')),
('conv_layers.2.weight',
tensor([[[[ 0.0923, -0.2195, -0.0723],
[ 0.1291, -0.0583, -0.1611],
[ 0.0139, -0.0046, -0.1465]],
[[-0.0997, -0.1716, -0.0795],
[ 0.0127, -0.1761, -0.1638],
[ 0.0932, -0.1470, -0.0870]],
[[ 0.1452, 0.0488, -0.0468],
[ 0.0645, 0.1149, 0.0542],
[ 0.0830, 0.1034, 0.1701]],
...,
[[-0.1139, -0.2468, -0.0355],
[ 0.1522, -0.0989, -0.1650],
[ 0.2063, -0.0631, -0.1900]],
[[-0.1387, -0.1229, -0.1565],
[ 0.0744, 0.0295, -0.0307],
[ 0.0650, 0.0476, -0.0677]],
[[ 0.1620, 0.0522, -0.0543],
[-0.0282, 0.0459, 0.1133],
[ 0.0426, 0.0782, 0.1486]]],
[[[-0.0271, -0.0997, -0.0221],
[-0.1735, -0.1225, -0.1425],
[ 0.0447, 0.0560, -0.1850]],
[[ 0.0485, -0.0897, -0.0843],
[-0.0081, -0.1842, -0.2023],
[-0.1302, -0.1154, -0.1408]],
[[ 0.0396, 0.1011, 0.1253],
[-0.0081, 0.1154, 0.2126],
[ 0.1453, 0.2207, 0.1120]],
...,
[[ 0.0561, 0.1025, 0.0581],
[-0.0539, -0.1215, -0.1013],
[-0.0541, -0.0800, -0.1831]],
[[ 0.1944, 0.0617, -0.0827],
[-0.1245, -0.1827, -0.0656],
[-0.1853, -0.0020, 0.1834]],
[[ 0.0651, 0.2093, -0.0510],
[-0.1064, -0.0422, -0.2138],
[-0.1241, -0.2071, -0.0933]]],
[[[-0.0660, -0.1198, 0.0852],
[-0.0132, -0.1090, 0.0209],
[-0.1586, 0.0976, 0.0839]],
[[ 0.0807, -0.1100, -0.1648],
[-0.0998, 0.0186, -0.1052],
[-0.0705, 0.0456, -0.1018]],
[[-0.0147, -0.1348, 0.0525],
[-0.0967, -0.1191, 0.1760],
[-0.2676, -0.3129, 0.0675]],
...,
[[-0.0521, -0.1144, -0.0643],
[-0.0506, 0.0351, -0.0592],
[ 0.1176, -0.0387, -0.2292]],
[[ 0.1840, -0.0900, -0.0504],
[ 0.1244, 0.0393, -0.1288],
[ 0.1729, -0.0330, -0.0522]],
[[ 0.2696, 0.0299, -0.0868],
[ 0.1021, -0.0892, -0.0398],
[-0.0893, -0.1205, -0.0960]]],
...,
[[[-0.0292, -0.0410, -0.1358],
[-0.1130, -0.2772, -0.1439],
[-0.1371, -0.3195, -0.2638]],
[[-0.0225, -0.2015, -0.1297],
[-0.0843, -0.0020, 0.2044],
[-0.0330, -0.0048, 0.0787]],
[[ 0.1352, 0.1503, 0.0843],
[ 0.0650, 0.0224, -0.0550],
[-0.0928, -0.2163, -0.1392]],
...,
[[-0.1712, -0.0769, -0.2674],
[ 0.1111, 0.1402, 0.0836],
[-0.0143, -0.0302, -0.0256]],
[[ 0.2093, -0.0856, 0.0512],
[-0.0145, -0.1217, -0.1344],
[ 0.0763, -0.0692, -0.0143]],
[[-0.0956, -0.2300, -0.0225],
[ 0.1986, 0.0062, -0.0725],
[ 0.0832, -0.1016, -0.2439]]],
[[[ 0.0956, 0.0246, -0.0242],
[ 0.1148, 0.0231, 0.0209],
[ 0.3484, 0.1841, 0.0022]],
[[-0.1036, -0.2374, -0.2619],
[-0.0990, -0.1145, -0.2034],
[ 0.0442, 0.0132, 0.0513]],
[[ 0.0380, 0.1820, 0.1979],
[-0.1045, 0.0654, 0.0961],
[-0.0963, 0.0853, -0.0099]],
...,
[[ 0.0351, -0.0524, 0.0442],
[-0.0068, 0.0115, -0.0270],
[ 0.0277, -0.0572, 0.0156]],
[[ 0.0215, 0.1666, 0.0546],
[ 0.0124, -0.0014, -0.1143],
[ 0.0170, -0.0394, -0.1007]],
[[ 0.1069, 0.0627, 0.0644],
[-0.0088, 0.0945, 0.1184],
[ 0.0970, 0.0866, 0.0259]]],
[[[-0.2069, 0.0665, 0.0734],
[-0.3250, -0.0169, 0.0816],
[-0.5162, -0.1236, 0.0843]],
[[-0.0874, -0.1256, 0.0480],
[ 0.0012, -0.1495, -0.1200],
[ 0.1155, -0.0235, -0.1149]],
[[-0.0084, 0.0561, 0.0861],
[-0.1047, 0.0471, 0.0711],
[ 0.0125, 0.0328, 0.0609]],
...,
[[-0.0429, 0.0616, 0.2603],
[-0.0075, -0.0438, 0.1066],
[-0.1692, -0.0880, -0.0058]],
[[-0.1370, -0.0349, 0.0362],
[-0.1229, -0.1262, 0.0621],
[-0.2553, -0.2443, 0.0420]],
[[-0.0076, -0.0176, 0.1308],
[-0.0437, -0.0522, -0.0320],
[ 0.0835, -0.0745, -0.0144]]]], device='cuda:0')),
('conv_layers.2.bias',
tensor([ 0.1164, 0.0382, -0.0905, 0.0847, 0.2145, -0.2343, 0.1302, -0.3282,
0.1268, -0.2411, 0.0701, 0.1443, 0.0988, 0.1370, -0.3299, 0.1883],
device='cuda:0')),
('conv_layers.5.weight',
tensor([[[[-0.1095, -0.0656, -0.1771],
[ 0.1568, -0.0056, -0.1729],
[ 0.1947, 0.2255, 0.0244]],
[[-0.1093, -0.2045, -0.2538],
[ 0.1282, 0.1139, -0.0367],
[ 0.0375, 0.1922, 0.0214]],
[[-0.0691, 0.0331, 0.0222],
[-0.1267, -0.1746, -0.1353],
[ 0.0018, 0.0165, 0.0564]],
...,
[[ 0.0292, 0.0692, -0.0219],
[-0.3376, -0.2647, -0.1070],
[-0.0407, -0.1068, 0.0514]],
[[-0.1259, -0.1074, 0.0749],
[-0.3046, -0.3124, -0.1162],
[-0.0484, -0.0533, 0.0912]],
[[-0.0062, -0.0455, 0.0290],
[ 0.0803, -0.0341, -0.2666],
[ 0.2740, 0.1377, -0.0374]]],
[[[-0.0904, -0.0785, -0.0097],
[-0.0121, -0.0166, 0.0100],
[ 0.0141, 0.0477, -0.0052]],
[[ 0.0252, -0.0636, 0.0381],
[-0.1159, 0.0245, -0.0305],
[-0.0925, 0.0084, -0.1337]],
[[-0.0725, -0.0708, 0.0063],
[ 0.0316, -0.0143, -0.0176],
[ 0.0364, 0.0044, -0.0456]],
...,
[[-0.0542, 0.0592, -0.0358],
[-0.0672, 0.0568, -0.0343],
[-0.0467, -0.0148, 0.0049]],
[[-0.0900, 0.0297, 0.0077],
[-0.0179, -0.1024, -0.0521],
[ 0.0032, 0.0622, 0.0207]],
[[ 0.0398, -0.0423, -0.0297],
[-0.0883, -0.0498, 0.0312],
[ 0.0769, 0.0716, -0.0064]]],
[[[ 0.1148, -0.0221, -0.0329],
[-0.0156, 0.0749, -0.0096],
[ 0.0423, -0.0050, -0.1125]],
[[-0.2032, -0.0738, 0.0016],
[ 0.0887, 0.0996, 0.2048],
[-0.0334, 0.1442, -0.0407]],
[[-0.2372, -0.1515, -0.0967],
[-0.1689, 0.0168, -0.0683],
[-0.0061, 0.1091, -0.1215]],
...,
[[-0.2292, -0.1652, -0.0893],
[-0.2584, -0.1487, -0.0564],
[-0.0830, -0.0670, -0.0104]],
[[ 0.0219, 0.0299, 0.0020],
[ 0.1534, 0.1581, 0.0833],
[ 0.2152, 0.2609, 0.1372]],
[[ 0.0607, -0.1570, -0.1316],
[-0.1134, -0.2614, -0.1131],
[-0.0110, 0.0637, 0.1134]]],
...,
[[[ 0.0246, -0.0126, 0.0550],
[ 0.0952, -0.1568, 0.0101],
[ 0.1120, -0.1517, -0.2929]],
[[-0.0445, -0.0380, -0.0571],
[ 0.0726, -0.0113, -0.0084],
[ 0.1339, 0.0503, -0.0837]],
[[ 0.0829, 0.1256, 0.0389],
[-0.0747, -0.0330, 0.0429],
[-0.0190, 0.1025, 0.0207]],
...,
[[ 0.1029, -0.0076, -0.0068],
[-0.1009, -0.1986, -0.0995],
[-0.1818, -0.0352, -0.0271]],
[[ 0.1323, -0.1627, -0.2471],
[ 0.1998, -0.0771, -0.2187],
[ 0.0691, -0.0770, -0.1488]],
[[ 0.0096, 0.0215, 0.2072],
[ 0.3307, 0.0750, 0.0279],
[ 0.3071, 0.0278, -0.0796]]],
[[[-0.0601, -0.1198, 0.0115],
[-0.2048, -0.2646, 0.0188],
[-0.0563, 0.1135, 0.1056]],
[[-0.0477, 0.0620, 0.2901],
[-0.1553, -0.0112, 0.2345],
[-0.0530, 0.0468, 0.1128]],
[[ 0.0606, -0.0296, -0.1666],
[ 0.0929, -0.0469, -0.0164],
[-0.0326, -0.0661, -0.0898]],
...,
[[-0.0506, 0.1036, 0.1113],
[-0.0016, -0.0416, -0.0165],
[-0.1897, -0.0308, 0.0705]],
[[-0.0992, -0.1556, -0.1470],
[-0.3597, -0.3496, -0.0893],
[-0.1810, 0.0054, 0.0219]],
[[ 0.1221, -0.0403, -0.0671],
[-0.0316, -0.1278, -0.0210],
[ 0.1822, 0.2151, 0.0880]]],
[[[ 0.0414, 0.0199, -0.0809],
[ 0.0401, 0.0947, -0.0527],
[ 0.0020, 0.1193, 0.1113]],
[[ 0.1354, 0.1348, 0.0739],
[ 0.0752, -0.0448, -0.0802],
[ 0.1728, 0.1087, -0.0273]],
[[ 0.0017, -0.2888, -0.3982],
[ 0.0997, -0.1119, -0.2367],
[-0.0183, -0.1725, -0.2882]],
...,
[[ 0.0648, -0.0906, -0.0727],
[ 0.0699, -0.1473, -0.1992],
[-0.1148, -0.1261, -0.1577]],
[[ 0.1226, 0.1267, -0.0759],
[-0.0473, 0.0997, -0.0816],
[ 0.1286, 0.0884, 0.0120]],
[[ 0.0310, -0.0499, 0.1054],
[ 0.0834, 0.0649, -0.0567],
[ 0.1381, -0.0086, 0.0077]]]], device='cuda:0')),
('conv_layers.5.bias',
tensor([ 0.1170, -0.0907, -0.3221, 0.1643, 0.0521, 0.1115, 0.0172, -0.0182,
-0.1222, 0.0238, -0.4677, -0.0653, 0.1273, 0.1242, 0.0221, 0.0509,
-0.2194, -0.0923, 0.0988, -0.0648, 0.1002, -0.0164, 0.2442, 0.0290,
-0.1497, 0.1456, -0.0010, 0.1501, 0.0158, -0.0874, 0.0126, -0.1712],
device='cuda:0')),
('conv_layers.7.weight',
tensor([[[[ 2.2499e-02, 9.5398e-02, 5.4118e-02],
[-8.5184e-02, -9.3538e-03, -1.1969e-01],
[ 1.7189e-02, -5.7248e-03, -2.4480e-02]],
[[-5.6292e-02, 9.8211e-03, -1.5957e-02],
[ 3.5376e-02, -3.3163e-02, -4.2399e-03],
[ 8.9650e-03, 4.2595e-02, 3.6416e-03]],
[[-6.0216e-02, 4.9249e-02, 4.5291e-03],
[ 4.4950e-03, -1.0195e-01, -5.5992e-02],
[ 4.8854e-02, -6.1603e-02, -1.3531e-02]],
...,
[[-5.9785e-02, -5.0377e-02, -4.7079e-02],
[-3.3527e-02, -9.4137e-03, -5.7852e-03],
[-1.0432e-01, -4.9809e-02, -1.1752e-01]],
[[-4.5568e-02, -4.7367e-02, -1.1810e-01],
[-1.0504e-02, 4.6055e-02, -7.0404e-02],
[-1.3441e-01, 6.3803e-03, -3.8274e-02]],
[[-8.0715e-03, -6.4791e-03, -2.6333e-02],
[-1.9942e-02, -6.1538e-02, -5.5139e-02],
[ 1.0986e-02, -7.9742e-02, -6.7536e-04]]],
[[[-6.6232e-03, -9.7663e-02, -2.3855e-01],
[-1.1025e-01, -1.0374e-01, -9.9613e-02],
[-1.3647e-01, 9.4773e-04, 1.9124e-02]],
[[ 2.7352e-02, -3.5636e-03, -5.9302e-02],
[-2.1908e-02, 4.3565e-02, -2.8521e-03],
[-6.7032e-02, 4.4339e-04, -6.1686e-03]],
[[ 4.3294e-03, -2.0463e-01, -1.8561e-01],
[ 3.5421e-02, -4.3554e-02, -6.3853e-02],
[-1.3921e-01, -2.5576e-01, -4.8896e-02]],
...,
[[-2.4714e-01, -1.3648e-01, -2.1607e-03],
[-1.3872e-01, -8.1666e-02, -1.2029e-01],
[-2.0942e-02, -1.2486e-01, -7.4010e-02]],
[[-4.8388e-02, -2.5921e-02, -1.3080e-02],
[-1.9414e-01, -1.7904e-01, -1.5131e-01],
[-1.5580e-01, 1.1327e-01, 6.9613e-02]],
[[-1.1243e-01, -9.3070e-03, -1.3363e-01],
[-1.2362e-01, -1.6030e-02, -2.3503e-01],
[ 3.1975e-02, -6.5936e-02, -2.6894e-01]]],
[[[-1.6156e-02, -2.0937e-01, -2.9457e-01],
[ 1.5393e-01, -1.3963e-01, -1.2838e-01],
[ 3.4906e-02, 1.2188e-02, -9.7188e-02]],
[[ 7.7738e-03, 4.0304e-03, -4.0938e-03],
[ 4.0679e-02, 4.0955e-02, -4.4811e-02],
[ 7.9978e-03, 2.6992e-02, 6.0060e-02]],
[[ 1.3432e-01, -1.7966e-04, -2.3535e-01],
[ 1.5863e-01, -1.1156e-01, -2.4179e-01],
[ 2.2223e-02, -2.3583e-01, -2.5151e-01]],
...,
[[ 1.2914e-02, 3.6835e-02, 4.5416e-02],
[-2.3667e-02, 3.6895e-02, 5.1462e-02],
[-8.2381e-02, 3.1130e-02, 2.5622e-02]],
[[ 8.8058e-02, 1.0707e-01, 4.2351e-02],
[ 3.5376e-02, 6.8796e-02, 7.8662e-02],
[ 1.2457e-01, 1.1543e-01, -3.8531e-02]],
[[ 1.3559e-01, -2.0228e-02, 9.2040e-02],
[ 1.6881e-01, 2.8056e-02, -7.8653e-02],
[ 1.2209e-01, -4.7966e-02, -2.1952e-01]]],
...,
[[[ 3.1097e-02, -7.3077e-03, -2.2142e-02],
[-1.1920e-02, -7.1635e-02, -9.3207e-02],
[ 4.3518e-02, 7.0689e-02, -1.7972e-02]],
[[-3.6344e-02, -5.2732e-02, -4.5408e-02],
[ 1.5622e-02, 5.4551e-02, 2.2838e-02],
[-1.3247e-02, 4.3958e-02, -2.1330e-02]],
[[-6.2166e-02, -6.9930e-02, -2.6256e-02],
[ 8.9073e-03, -6.9877e-02, 7.2819e-02],
[-7.8599e-02, 3.0021e-03, 6.0625e-02]],
...,
[[-5.7837e-02, -3.3123e-02, 4.5942e-02],
[-3.1458e-02, -7.5388e-02, -9.0068e-02],
[-5.9488e-03, -8.0074e-02, -6.8852e-02]],
[[-2.1781e-02, 3.0374e-02, -6.7051e-02],
[ 7.2290e-02, -5.4341e-02, -3.1834e-02],
[-3.1686e-02, -4.0892e-02, -5.8874e-02]],
[[-5.9701e-02, 3.7485e-02, -1.9764e-02],
[-4.8876e-03, -9.5781e-02, 2.3818e-02],
[-8.7127e-02, -7.2963e-02, 6.6764e-02]]],
[[[ 3.1632e-02, 7.9685e-02, -1.5331e-02],
[-1.8272e-02, 9.4694e-03, -5.8192e-05],
[-8.2994e-02, 6.4579e-02, 7.6400e-03]],
[[-3.6541e-02, 8.8866e-03, 3.1687e-02],
[-2.6547e-02, -2.5616e-02, -5.8962e-02],
[-3.4145e-02, -5.1740e-02, 2.4353e-02]],
[[-5.6845e-02, -6.4097e-02, 3.3790e-02],
[-5.8957e-03, -4.4793e-05, 3.4097e-02],
[-9.5500e-02, -6.5940e-02, -9.0149e-02]],
...,
[[-1.7610e-02, 3.5972e-02, -2.1824e-02],
[-9.0513e-02, -3.5909e-02, 4.0565e-02],
[-9.1182e-02, -7.9824e-02, -3.3636e-02]],
[[-5.7219e-02, -2.0720e-02, 1.7318e-02],
[-6.1422e-02, -1.0038e-01, -5.1097e-02],
[-5.7999e-02, -3.5617e-02, -8.3080e-02]],
[[-8.5554e-03, -8.9858e-02, -3.9716e-02],
[-5.0665e-02, -8.9520e-02, -9.3181e-02],
[-8.7556e-02, -3.9547e-02, -3.7038e-02]]],
[[[-1.1099e-01, -4.8292e-02, -7.9537e-02],
[-6.0348e-02, -3.4797e-02, 1.7809e-02],
[ 4.0387e-03, 2.1653e-02, -5.1121e-02]],
[[ 4.6280e-02, 4.9607e-02, 2.2472e-02],
[ 5.1504e-03, 3.1113e-02, 5.2987e-03],
[ 4.6701e-02, 1.9525e-02, 4.9127e-02]],
[[ 5.0615e-02, -9.3610e-03, -6.1184e-04],
[ 4.9639e-02, 1.4825e-02, -5.2637e-02],
[-7.8584e-02, -6.0898e-02, -1.4560e-02]],
...,
[[-1.5894e-02, -1.0547e-01, -5.7193e-02],
[ 1.0704e-02, -7.1519e-02, -1.6435e-02],
[-7.7828e-02, 1.5730e-02, -2.4455e-02]],
[[-5.7263e-02, -6.7517e-02, -4.5756e-02],
[-2.7278e-02, -1.0319e-01, -2.9549e-02],
[ 2.7609e-02, -4.4648e-02, 1.2991e-02]],
[[ 8.8479e-03, 5.7747e-03, 7.6098e-03],
[-4.3104e-03, -6.3051e-02, -5.2788e-02],
[-2.7466e-02, -5.3770e-02, -5.3491e-02]]]], device='cuda:0')),
('conv_layers.7.bias',
tensor([-0.0701, 0.0874, 0.2105, 0.1985, -0.0763, -0.0341, 0.0361, 0.0278,
0.0160, -0.1227, -0.0686, -0.0413, -0.0234, -0.1185, 0.0083, 0.0371,
0.2181, 0.0576, -0.0253, -0.0825, -0.0932, -0.1176, 0.1491, -0.0632,
-0.0184, 0.1843, 0.2265, 0.0545, -0.0471, -0.0916, -0.1022, -0.0783],
device='cuda:0')),
('conv_layers.10.weight',
tensor([[[[-6.7597e-02, -2.2623e-02, -3.7824e-02],
[-1.1393e-02, -1.5503e-02, -7.0454e-02],
[ 2.1758e-02, 3.2858e-02, -8.1761e-02]],
[[ 6.5973e-02, -1.3218e-01, -1.4976e-01],
[-2.8890e-02, 9.3473e-02, 8.4206e-02],
[ 7.9875e-02, 1.3171e-01, 1.4811e-01]],
[[ 7.3549e-02, -1.5380e-01, -9.9707e-02],
[-6.1073e-02, -1.7569e-01, 2.1596e-02],
[-1.4011e-01, 2.5301e-02, 2.2501e-01]],
...,
[[ 2.1612e-02, 4.4848e-02, -4.6539e-02],
[ 6.4514e-03, -6.3685e-02, 3.3970e-02],
[-1.2711e-01, -6.2470e-02, -3.4147e-02]],
[[-5.2444e-02, -3.4956e-02, 1.3939e-02],
[-2.7429e-03, -1.2877e-02, -6.5347e-02],
[ 5.9422e-02, -5.9410e-02, 8.3500e-02]],
[[ 2.8748e-02, -1.0087e-02, -5.8701e-02],
[ 7.1968e-02, -8.3127e-03, 1.9860e-02],
[ 3.8451e-02, -2.5932e-02, 7.7579e-03]]],
[[[-4.4868e-02, -4.9358e-02, 3.2435e-03],
[-2.3528e-02, -2.2924e-02, -5.7670e-02],
[-2.6765e-02, 4.7664e-03, 2.4311e-02]],
[[ 1.6071e-01, 2.2619e-01, 2.2369e-01],
[ 1.1050e-01, 9.4455e-02, -1.5801e-02],
[-1.6725e-01, -2.5303e-01, -5.0954e-02]],
[[ 8.9513e-02, 1.9090e-01, 1.3944e-01],
[ 1.0323e-01, 6.1181e-02, -7.0974e-03],
[-2.5250e-02, -1.4178e-01, -1.9049e-01]],
...,
[[ 7.6920e-02, -2.6043e-02, 4.6554e-02],
[-7.0690e-02, 4.5809e-02, -6.5212e-02],
[ 2.9629e-02, -2.5024e-02, -1.1038e-01]],
[[ 5.6492e-02, -4.6565e-02, 5.5285e-02],
[ 9.4170e-03, -5.6970e-02, -1.3783e-02],
[ 3.5964e-02, -6.5171e-02, -4.5856e-03]],
[[-6.1914e-02, 3.9726e-02, 3.3185e-02],
[ 3.7572e-03, -9.2254e-02, 2.3387e-02],
[ 7.0046e-02, 4.1782e-02, 6.9764e-03]]],
[[[-5.0502e-02, -7.6543e-02, 6.8452e-02],
[-2.6470e-02, -1.7197e-02, 9.1523e-02],
[ 4.4173e-02, -2.2798e-02, 5.8979e-02]],
[[-2.0470e-01, -4.6242e-01, -2.2106e-01],
[-1.5567e-02, -8.3598e-02, 1.5482e-02],
[ 1.5297e-01, 9.9629e-02, 1.2969e-01]],
[[-1.1514e-02, -1.3224e-01, -3.1052e-02],
[ 1.5594e-02, -9.3898e-02, 4.6488e-03],
[ 5.2775e-02, 4.2825e-02, 1.3275e-01]],
...,
[[ 3.2786e-03, 1.0071e-02, 1.6233e-02],
[ 1.2771e-02, -5.8623e-02, -6.8939e-02],
[-5.6003e-02, -1.4757e-01, -1.3082e-02]],
[[-1.7594e-02, -4.3532e-02, -7.4250e-02],
[ 1.7378e-02, 3.4521e-02, -1.8281e-02],
[ 1.6654e-02, 3.3445e-02, 1.1780e-04]],
[[ 2.6648e-02, 3.7068e-02, -9.4881e-02],
[-4.9801e-02, 3.3835e-02, -5.0964e-02],
[ 6.7335e-03, 1.2063e-01, -1.0524e-02]]],
...,
[[[-2.0060e-02, 5.4304e-02, -2.2409e-02],
[-7.2953e-02, -4.8014e-02, -7.4707e-03],
[ 6.8767e-02, -1.1151e-03, -3.9253e-02]],
[[ 1.1264e-01, 6.9874e-02, 4.1867e-02],
[ 1.4520e-01, 1.6795e-01, -2.8196e-02],
[ 6.2290e-02, 1.1198e-01, -1.2909e-01]],
[[ 4.4159e-02, 7.9707e-02, -7.3394e-02],
[-4.3494e-02, 1.2195e-01, 1.0244e-01],
[ 2.4780e-02, -2.1200e-02, 4.5481e-02]],
...,
[[ 3.3009e-03, -8.4016e-02, -1.8705e-02],
[-4.1318e-02, -4.5744e-02, -2.4882e-02],
[ 4.8002e-02, -6.2031e-02, -2.8618e-02]],
[[ 1.8986e-02, 4.3099e-02, -6.2653e-02],
[-1.6298e-02, 4.2192e-02, -3.0712e-02],
[-5.1432e-02, -4.1649e-03, -2.6098e-02]],
[[ 6.5936e-02, 3.0743e-02, 6.8056e-02],
[ 8.4438e-02, -1.9235e-02, 8.2303e-02],
[ 6.0637e-02, -3.2076e-02, 1.1498e-02]]],
[[[-3.4342e-02, 4.8407e-02, 2.5302e-02],
[-5.4595e-02, -4.2572e-02, 6.5915e-02],
[ 6.8476e-02, -3.1994e-03, -6.9858e-02]],
[[ 3.2660e-02, 2.0210e-02, 9.3262e-03],
[-3.7566e-02, 7.0298e-02, 3.9312e-02],
[-1.0916e-01, 1.5218e-02, 6.5632e-02]],
[[ 2.2846e-01, 3.5605e-01, 9.3237e-02],
[-3.1009e-02, 7.9167e-02, 6.4556e-03],
[-3.0393e-01, -3.3808e-01, -1.2728e-01]],
...,
[[ 2.9710e-02, 1.5261e-02, -9.7465e-03],
[ 4.6694e-02, 5.1595e-02, 3.3770e-02],
[-2.7946e-02, -3.8393e-02, 1.8574e-02]],
[[-6.0716e-02, -1.6147e-02, 8.1643e-02],
[-8.2329e-02, -8.8352e-02, 5.4603e-02],
[ 6.3852e-02, 3.7089e-02, 6.4789e-02]],
[[ 1.2117e-02, -6.8204e-03, -5.7910e-02],
[-6.8226e-02, -6.8275e-02, -1.0155e-02],
[-9.3282e-03, -6.3334e-02, -9.9689e-03]]],
[[[-3.6639e-03, -1.8986e-02, 2.2272e-02],
[ 2.4665e-02, 6.5914e-02, -1.0204e-02],
[ 1.1988e-02, -2.6245e-02, -3.4486e-03]],
[[-1.0102e-02, 2.9737e-02, -4.2158e-02],
[ 7.4576e-02, 3.4819e-02, -5.4649e-02],
[ 2.8291e-02, 5.1259e-02, -8.0136e-02]],
[[ 5.2500e-02, 1.0077e-01, 3.4460e-03],
[ 1.4086e-01, 1.1228e-01, -4.9805e-02],
[ 1.4433e-01, 1.3624e-01, 1.4142e-02]],
...,
[[-8.8891e-04, 3.0176e-02, -3.3976e-02],
[-4.8131e-02, -4.3356e-03, -1.1760e-01],
[ 9.9343e-02, 2.0304e-02, 5.5851e-02]],
[[-2.1679e-02, 2.8742e-02, 6.4630e-02],
[ 2.4566e-02, 8.5859e-02, 2.3977e-02],
[ 1.3656e-02, -5.1258e-02, 5.1999e-02]],
[[-4.9977e-02, -5.9605e-02, -6.6550e-02],
[ 7.2680e-02, 2.2778e-03, -4.5316e-02],
[ 8.5945e-02, 4.7251e-02, 4.7238e-02]]]], device='cuda:0')),
('conv_layers.10.bias',
tensor([ 0.0642, 0.0687, 0.0960, -0.1309, -0.0766, -0.0367, 0.0795, -0.1143,
0.0122, -0.0952, -0.2056, -0.0745, -0.0935, 0.1351, -0.0816, -0.1402,
0.0122, 0.0331, -0.0191, 0.0293, -0.1192, 0.0560, -0.1193, -0.0699,
0.0624, 0.0039, -0.1262, -0.0324, 0.0562, 0.0866, 0.0118, 0.1744],
device='cuda:0')),
('conv_layers.12.weight',
tensor([[[[-0.0157, -0.0241, -0.0030],
[ 0.1158, 0.0615, -0.0514],
[ 0.2179, 0.0440, -0.0648]],
[[-0.1214, -0.0921, 0.0742],
[-0.3494, -0.4921, -0.0919],
[-0.3144, -0.5246, -0.0433]],
[[ 0.0083, 0.0114, -0.0308],
[ 0.0253, 0.1199, 0.0072],
[ 0.0364, 0.1593, 0.0162]],
...,
[[ 0.1927, -0.0028, -0.1924],
[-0.0065, 0.0677, -0.1100],
[-0.0778, 0.0765, -0.0365]],
[[-0.1485, -0.0223, 0.0312],
[-0.0632, -0.1043, -0.0043],
[ 0.2089, 0.1768, 0.1205]],
[[-0.0901, 0.0607, 0.0349],
[ 0.1711, 0.1077, -0.0997],
[ 0.2479, 0.0641, -0.1806]]],
[[[-0.0399, -0.0421, -0.0170],
[ 0.0521, 0.0477, -0.0273],
[-0.0779, 0.0046, -0.0104]],
[[-0.0699, -0.0046, -0.0009],
[-0.0567, -0.0499, -0.0719],
[-0.0861, -0.0008, -0.0276]],
[[-0.0588, -0.0803, 0.0073],
[-0.0265, -0.1075, -0.0756],
[-0.1067, 0.0050, -0.0433]],
...,
[[ 0.0524, -0.0296, -0.0160],
[-0.0171, -0.0843, -0.0372],
[ 0.0058, -0.0566, -0.0169]],
[[ 0.0074, -0.0392, -0.0449],
[-0.0417, -0.0377, -0.0287],
[ 0.0076, -0.0481, -0.0283]],
[[-0.0831, 0.0108, -0.0684],
[-0.0427, -0.0745, 0.0053],
[ 0.0226, -0.0104, -0.0376]]],
[[[-0.0059, -0.0288, 0.0615],
[-0.0665, -0.0075, 0.0628],
[-0.2120, 0.0427, 0.2499]],
[[-0.1198, -0.1782, -0.0528],
[-0.0658, -0.1088, 0.0557],
[ 0.1454, 0.0912, 0.0742]],
[[ 0.0072, -0.1170, -0.1770],
[-0.0097, 0.0052, -0.0366],
[ 0.0406, 0.1253, 0.1294]],
...,
[[-0.3158, -0.1892, -0.2053],
[ 0.0139, -0.0574, -0.0418],
[ 0.1701, 0.0420, -0.1062]],
[[-0.1497, -0.0989, 0.0508],
[-0.1117, -0.0514, 0.0627],
[ 0.1754, 0.2754, 0.2717]],
[[ 0.1550, 0.0721, 0.0459],
[ 0.0802, 0.0918, 0.0702],
[ 0.0237, 0.0426, 0.0196]]],
...,
[[[-0.0036, 0.0632, 0.0356],
[-0.1030, 0.1392, 0.2130],
[-0.3176, 0.1522, 0.1268]],
[[-0.1867, -0.1173, 0.0041],
[ 0.0188, -0.1691, -0.2422],
[ 0.0940, -0.0426, -0.0805]],
[[ 0.0402, 0.0052, -0.0413],
[-0.1049, 0.0195, 0.1118],
[-0.2465, -0.1897, -0.1132]],
...,
[[ 0.0954, 0.0871, 0.0857],
[-0.0667, -0.0365, -0.0033],
[-0.0174, -0.1704, 0.0495]],
[[-0.1103, -0.2273, -0.1476],
[-0.4409, -0.2488, 0.0993],
[-0.1082, 0.0335, 0.1200]],
[[-0.1043, -0.1367, -0.2143],
[-0.0118, -0.0871, -0.1099],
[-0.0577, 0.0369, -0.0366]]],
[[[ 0.1990, -0.0632, -0.0543],
[ 0.2795, -0.0017, 0.0217],
[ 0.1463, -0.0954, -0.0863]],
[[-0.2794, -0.2505, -0.1524],
[-0.2578, -0.1480, -0.1687],
[-0.1726, -0.1450, -0.0809]],
[[-0.1080, -0.1144, -0.0341],
[-0.0554, -0.0279, 0.1196],
[ 0.0324, 0.0468, -0.0197]],
...,
[[-0.0211, 0.0817, -0.0566],
[ 0.1329, 0.0768, -0.0214],
[ 0.1156, 0.0558, -0.0220]],
[[ 0.2027, 0.1367, 0.1969],
[ 0.0067, 0.1054, 0.0556],
[ 0.0431, 0.0195, 0.0818]],
[[-0.1853, 0.0925, 0.1008],
[-0.2060, -0.0107, 0.1009],
[-0.1971, 0.0075, 0.1347]]],
[[[-0.2093, -0.2286, 0.1980],
[-0.1248, -0.3066, -0.0016],
[-0.0551, -0.0301, 0.0122]],
[[ 0.0455, 0.0046, -0.0264],
[ 0.0396, 0.0395, 0.0472],
[-0.0117, 0.0666, -0.0025]],
[[-0.3044, -0.0493, -0.0722],
[-0.2305, -0.1953, -0.0893],
[ 0.0196, -0.0168, 0.0506]],
...,
[[ 0.0100, -0.0525, -0.2186],
[ 0.1269, 0.0824, -0.1235],
[-0.0502, 0.0400, 0.1078]],
[[-0.0763, -0.1898, -0.0552],
[-0.0029, 0.0084, -0.0787],
[-0.2265, -0.0434, -0.0991]],
[[ 0.1282, -0.0014, -0.0596],
[ 0.1046, 0.0548, -0.0689],
[ 0.0313, -0.0634, -0.1000]]]], device='cuda:0')),
('conv_layers.12.bias',
tensor([-0.0362, -0.1238, 0.1508, -0.0672, -0.0895, -0.1289, -0.0671, 0.0917,
0.1817, 0.2171, -0.0463, 0.0567, 0.1985, -0.0070, -0.1357, 0.0536,
-0.1054, -0.0697, 0.1855, -0.0347, -0.0095, -0.1292, -0.1055, -0.1075,
-0.1449, -0.0199, -0.0051, 0.0563, 0.0597, -0.0155, -0.0870, -0.1826],
device='cuda:0')),
('fc_layers.0.weight',
tensor([[-0.0632, -0.1070, 0.1711, ..., 0.0174, 0.0452, -0.0919],
[-0.0005, 0.0900, -0.0187, ..., 0.0654, -0.1165, -0.2872],
[ 0.1493, 0.0440, 0.0050, ..., -0.0926, -0.0499, -0.0843],
...,
[ 0.2435, 0.1394, 0.2350, ..., -0.1776, -0.1793, -0.1184],
[-0.1207, 0.0498, 0.0855, ..., 0.0341, -0.0382, 0.0300],
[ 0.1144, -0.0924, 0.0154, ..., -0.0063, 0.0729, -0.0113]],
device='cuda:0')),
('fc_layers.0.bias',
tensor([-7.5471e-02, -4.7044e-02, 3.9739e-02, 9.6437e-03, -1.2349e-01,
-9.5434e-02, -7.7310e-02, 5.8749e-02, -8.7424e-02, 4.1897e-02,
5.2782e-02, -4.4698e-02, -7.2793e-02, 9.8761e-03, -7.5268e-02,
1.4382e-02, 1.0719e-01, -7.0671e-03, 8.8790e-02, -4.5117e-03,
4.2813e-02, 1.7872e-01, -7.0900e-02, -4.5067e-02, 5.0628e-02,
-1.6093e-03, 1.6532e-01, -4.1232e-02, 1.1471e-01, -1.1077e-01,
-6.7515e-02, -3.8015e-02, 7.9815e-02, -4.4343e-02, 8.0705e-02,
7.7868e-02, -6.2276e-02, -5.2916e-02, 3.6619e-02, -1.2397e-01,
-5.5869e-02, 1.9107e-02, -2.9131e-02, 1.7017e-01, 2.0321e-01,
6.0804e-02, -4.4136e-02, -1.2702e-01, 1.3369e-01, -3.7582e-02,
-1.2702e-02, 2.1759e-01, -2.8186e-02, -1.3208e-01, -6.3909e-02,
-6.7310e-02, 1.6468e-02, -2.5479e-02, 1.4459e-01, -1.8700e-01,
1.9330e-02, 2.2157e-02, -4.2694e-02, 7.6497e-03, -2.2465e-02,
5.7866e-02, -3.6289e-02, -5.7828e-02, 3.7724e-02, 1.0670e-02,
-7.4934e-02, 2.0984e-01, -1.3130e-01, -2.2147e-02, -1.2421e-01,
1.2392e-01, -5.1247e-02, -4.4388e-03, -7.1871e-02, -3.4275e-02,
-3.8342e-03, -9.2756e-04, -8.9739e-04, -1.0691e-01, 1.9836e-01,
-6.2095e-02, -2.8143e-02, -3.2492e-02, -3.0367e-02, 1.9777e-03,
-2.9271e-02, 3.3611e-02, -1.1885e-02, -9.9917e-02, -1.1450e-02,
-3.3153e-03, 3.7405e-02, -5.7588e-02, 1.4566e-01, -3.9004e-02,
3.3160e-02, -3.2023e-02, 6.3657e-02, -6.4459e-02, 1.7269e-02,
-3.3321e-03, 2.4064e-02, 1.0173e-02, -4.5089e-02, 2.6670e-02,
9.3188e-02, -9.8245e-02, -6.2308e-02, 9.2468e-02, 1.2805e-01,
1.8405e-02, 4.2817e-02, 9.9239e-02, -9.0405e-02, 9.4603e-02,
-8.2715e-02, -1.2272e-02, -2.0346e-02, 3.0300e-01, 2.1189e-02,
-6.6701e-02, -2.6220e-02, -6.8816e-02, -5.1127e-02, 1.9070e-02,
2.4801e-02, -9.5132e-03, 4.8540e-02, -5.2171e-02, -7.0649e-02,
-9.3554e-03, -1.2309e-01, 1.6512e-01, 7.2760e-02, -5.3055e-02,
-8.1671e-02, -4.1623e-02, 3.5022e-02, 3.4548e-03, -4.0705e-02,
2.3827e-03, -3.5331e-03, -5.1170e-02, -7.4705e-02, 7.8367e-02,
1.5220e-01, -3.7113e-02, -1.7159e-02, 7.0539e-02, -1.5989e-01,
-4.0490e-04, -3.3790e-03, 1.1547e-01, -3.9307e-02, -4.4892e-02,
-4.6770e-02, 4.8041e-02, 3.7266e-02, 1.0190e-02, 1.8835e-01,
-6.2015e-03, -2.5403e-02, 2.8843e-02, 2.9666e-02, -1.8740e-02,
-6.0407e-02, 1.4837e-02, 5.7794e-02, 1.2535e-01, -3.8148e-02,
4.0933e-03, -3.7598e-02, -4.4102e-03, 3.5381e-02, 9.3976e-03,
2.8180e-02, -4.2790e-02, -4.0386e-02, 2.0871e-02, -4.8649e-02,
-5.4535e-02, -5.6639e-02, 4.5061e-02, 2.7846e-02, 1.9222e-02,
-1.3682e-03, -1.1697e-01, -2.4850e-02, -6.6322e-02, -1.1004e-01,
-4.6950e-02, 7.9575e-02, -4.5633e-03, -1.2653e-01, -9.4538e-03,
1.7077e-01, -5.4675e-02, 6.0004e-02, -6.2390e-02, -3.8895e-02,
-2.0436e-02, -1.6660e-02, -3.6358e-02, 2.4988e-02, -5.1855e-03,
-3.6969e-02, 3.0877e-02, -6.6711e-02, -1.3383e-02, -8.8303e-02,
-4.2771e-02, -7.1090e-02, -7.7083e-02, 9.8688e-02, -3.6544e-02,
1.3515e-01, -3.7525e-02, 2.4684e-02, -3.5053e-02, 1.6431e-02,
2.6296e-02, -3.8674e-02, 1.8256e-01, 5.3972e-02, -8.3565e-02,
-3.3257e-02, -4.5462e-02, 2.0903e-02, 2.1551e-02, 8.1844e-02,
1.8170e-02, -3.6580e-02, 3.5528e-02, -4.3933e-02, 8.8120e-03,
-3.6784e-02, -1.6342e-02, -6.8054e-02, -3.6240e-02, -5.5788e-02,
3.6599e-02, -1.6098e-04, -9.0993e-02, -8.4473e-03, -1.4432e-02,
-8.2532e-02, -4.1277e-02, -3.7654e-02, 1.0343e-01, -2.5241e-02,
3.5380e-02], device='cuda:0')),
('fc_layers.2.weight',
tensor([[ 7.3107e-02, -9.8385e-02, 1.0004e-01, ..., 1.7022e-01,
1.1104e-01, -3.8357e-05],
[-1.8997e-01, 3.1055e-01, 2.5749e-02, ..., 1.7469e-01,
6.8426e-02, 2.3044e-01],
[ 1.9349e-02, -1.5605e-01, 6.2200e-02, ..., -1.1195e-01,
-2.9346e-01, -1.4345e-01],
...,
[-1.7331e-01, -2.2483e-02, -4.1710e-02, ..., -1.3351e-01,
9.7311e-03, 9.5782e-02],
[ 4.6591e-02, -1.0744e-01, 5.8243e-02, ..., -1.1312e-02,
9.9584e-02, -1.1631e-01],
[ 9.1322e-02, -1.1081e-01, -1.2459e-01, ..., -2.6850e-02,
-1.6068e-01, -1.1225e-01]], device='cuda:0')),
('fc_layers.2.bias',
tensor([-0.0914, 0.0754, 0.1927, 0.1790, 0.0668, -0.0364, -0.1110, -0.1232,
-0.0725, -0.1693], device='cuda:0'))])},
{'ratio': 0.42,
'bias': 0,
'train_losses': [277.03645708423636,
229.0851925615657,
196.30664386258283,
181.31476827753775,
172.82572058066856,
167.47407222584695,
163.6433367539867,
159.9462068010167,
157.56088399554216,
156.28887091319598,
154.39115000852948,
152.52830158644738,
150.76666877427857,
151.06974269457513,
148.31644893191873],
'test_losses': [248.96016731449203,
204.9258733917685,
184.7824100045597,
171.47654610054167,
167.85912068684897,
160.35684847364237,
154.6211293818904,
155.4042888435663,
151.2020237305585,
149.01881712558222,
149.55032587986366,
146.14755183107712,
145.6808856047836,
146.3899157701754,
143.7829731539184],
'model_state_dict': OrderedDict([('conv_layers.0.weight',
tensor([[[[-0.1646, -0.0546, 0.2814],
[-0.0253, -0.0351, 0.2038],
[-0.2294, 0.0606, -0.0436]],
[[-0.2291, 0.2258, 0.2068],
[-0.2430, -0.0305, 0.1086],
[-0.2014, -0.1390, 0.2092]],
[[-0.1062, 0.1745, -0.0764],
[-0.0217, 0.1362, 0.0196],
[-0.0332, 0.0198, -0.0583]]],
[[[-0.3346, -0.0034, 0.0457],
[-0.2189, 0.1704, 0.0117],
[ 0.1402, 0.1379, 0.1003]],
[[-0.2955, -0.0168, -0.1081],
[ 0.0236, 0.0903, -0.0620],
[-0.0274, 0.1257, 0.3024]],
[[-0.2064, -0.0874, 0.0785],
[-0.1384, 0.1704, -0.0351],
[ 0.1381, -0.0271, 0.0383]]],
[[[ 0.1964, 0.2629, 0.0713],
[-0.1290, 0.0959, -0.1398],
[-0.1156, -0.2175, 0.0052]],
[[ 0.2074, 0.0276, 0.1217],
[ 0.1119, -0.0513, -0.0963],
[-0.1621, -0.2571, -0.0640]],
[[ 0.1526, 0.0041, 0.1493],
[ 0.1002, 0.0914, -0.1411],
[-0.1726, -0.1515, 0.0932]]],
[[[-0.0061, -0.1820, -0.2217],
[-0.1127, 0.0367, 0.0807],
[ 0.0144, 0.1557, 0.1989]],
[[-0.0382, -0.3123, -0.2031],
[ 0.1064, -0.1614, -0.0445],
[ 0.2439, 0.0400, 0.1464]],
[[-0.1872, -0.0308, -0.1206],
[-0.0614, 0.1198, 0.1462],
[ 0.1686, 0.1151, 0.0267]]],
[[[-0.0616, 0.1593, 0.2704],
[-0.1696, -0.2005, -0.1109],
[ 0.1210, 0.0383, -0.0340]],
[[-0.1182, 0.0891, 0.2877],
[ 0.0210, -0.2007, -0.1385],
[-0.2136, -0.1423, -0.1153]],
[[ 0.1690, 0.0690, 0.0726],
[-0.0839, 0.0590, -0.0182],
[-0.0523, 0.1589, -0.0620]]],
[[[-0.0506, -0.0271, 0.1220],
[ 0.1386, 0.1963, 0.1223],
[-0.2649, -0.2233, -0.0884]],
[[ 0.1545, 0.0640, 0.1527],
[ 0.1253, 0.0808, 0.0734],
[-0.0078, -0.2614, 0.0535]],
[[-0.0305, 0.0439, 0.0785],
[-0.1158, 0.0394, -0.0119],
[-0.1652, -0.1348, -0.2449]]],
[[[ 0.0969, 0.1163, -0.1365],
[-0.1002, 0.0590, -0.0026],
[ 0.1062, 0.0805, -0.2213]],
[[ 0.2701, -0.0937, -0.2425],
[ 0.0134, -0.1511, -0.1322],
[ 0.3078, 0.1953, -0.1587]],
[[ 0.1321, -0.0355, -0.0778],
[ 0.1624, -0.1177, -0.1811],
[ 0.0292, 0.1748, -0.0604]]],
[[[ 0.1152, -0.1728, -0.0294],
[-0.0224, 0.0416, -0.0264],
[ 0.1659, 0.1589, 0.1792]],
[[-0.0264, -0.0807, 0.1035],
[-0.0590, 0.0684, -0.0768],
[-0.1278, 0.1143, -0.0313]],
[[-0.1338, -0.0890, 0.1116],
[-0.1048, -0.1256, -0.1767],
[ 0.1900, -0.0091, 0.1583]]]], device='cuda:0')),
('conv_layers.0.bias',
tensor([ 0.1702, 0.2340, 0.1665, 0.1036, -0.2397, 0.0161, 0.2335, -0.2881],
device='cuda:0')),
('conv_layers.2.weight',
tensor([[[[-0.0254, -0.1624, 0.0583],
[-0.1691, -0.2471, -0.1848],
[ 0.0363, -0.2744, -0.2515]],
[[ 0.0349, -0.0578, -0.0704],
[ 0.0531, -0.0112, -0.1606],
[ 0.1394, -0.1321, -0.0454]],
[[ 0.0328, -0.0567, 0.0728],
[-0.0915, -0.2274, -0.2761],
[-0.1471, -0.1877, -0.1096]],
...,
[[-0.1830, -0.1254, 0.0842],
[-0.0225, -0.0283, -0.1596],
[ 0.1380, 0.1283, -0.1385]],
[[ 0.2142, 0.1910, 0.0594],
[ 0.1104, 0.0313, 0.1585],
[-0.0014, 0.0460, 0.1572]],
[[ 0.0437, -0.0826, -0.0821],
[ 0.0696, 0.0575, -0.0111],
[ 0.0049, 0.0586, -0.1178]]],
[[[-0.0947, -0.0283, -0.0130],
[-0.1614, 0.0564, -0.0525],
[-0.0020, 0.0728, 0.0414]],
[[-0.1854, -0.1158, 0.0089],
[-0.1530, 0.0563, 0.0462],
[ 0.0040, -0.0545, 0.1603]],
[[-0.0039, 0.2092, 0.0496],
[ 0.1302, 0.0874, 0.0978],
[ 0.0932, 0.1697, 0.0066]],
...,
[[ 0.1608, 0.2227, 0.1551],
[-0.0643, -0.0332, 0.0584],
[-0.1553, -0.1259, 0.0596]],
[[-0.0715, -0.1513, 0.0607],
[ 0.0483, -0.0667, -0.0312],
[ 0.0224, 0.0697, 0.0644]],
[[-0.0819, -0.1269, 0.0404],
[-0.1411, -0.1501, -0.0967],
[-0.1365, -0.0151, -0.1138]]],
[[[ 0.1330, 0.0769, -0.1188],
[-0.0097, 0.0512, -0.1829],
[ 0.1001, -0.0841, 0.0136]],
[[ 0.2092, -0.0574, -0.1040],
[ 0.0143, -0.2083, -0.3089],
[-0.1714, -0.3834, -0.3515]],
[[-0.3034, -0.0772, -0.0375],
[-0.0434, 0.0590, 0.0288],
[ 0.0557, 0.1855, 0.1738]],
...,
[[-0.1088, 0.0918, 0.0733],
[ 0.0618, -0.0022, 0.0418],
[-0.0528, -0.1227, -0.0878]],
[[-0.0302, -0.0782, 0.0157],
[ 0.0090, -0.0786, -0.0205],
[ 0.0710, 0.1482, 0.1164]],
[[ 0.0938, 0.0724, 0.1114],
[-0.0052, 0.0179, 0.1027],
[-0.0344, 0.0298, 0.0757]]],
...,
[[[ 0.1129, -0.0166, -0.2911],
[ 0.2121, -0.0039, -0.1073],
[ 0.1014, 0.0853, -0.1510]],
[[ 0.0033, 0.0638, 0.0586],
[ 0.0400, 0.0012, -0.1266],
[ 0.0122, 0.0833, -0.0429]],
[[-0.0756, 0.1150, -0.0894],
[-0.0114, 0.0887, 0.0418],
[-0.1274, 0.0561, 0.1951]],
...,
[[-0.1248, -0.0540, -0.1837],
[ 0.1277, 0.1236, -0.0345],
[ 0.0596, 0.0626, 0.1117]],
[[-0.4631, 0.1867, 0.2866],
[-0.4068, -0.0367, 0.2236],
[-0.3901, 0.0619, 0.0453]],
[[-0.0904, -0.1033, 0.0214],
[-0.0328, -0.0457, 0.0213],
[-0.1051, -0.1068, 0.0696]]],
[[[ 0.0612, 0.0130, -0.0663],
[ 0.0488, 0.0306, -0.1045],
[ 0.1127, 0.0260, -0.0842]],
[[ 0.0094, 0.0485, 0.0713],
[ 0.0623, -0.0140, 0.0135],
[ 0.0237, 0.0088, 0.0391]],
[[-0.2975, -0.3776, -0.3118],
[-0.1723, -0.0102, -0.0337],
[-0.0888, -0.1804, 0.0982]],
...,
[[-0.2210, -0.1633, -0.0490],
[-0.1985, -0.1327, 0.0290],
[ 0.0658, 0.0849, 0.1946]],
[[-0.1684, 0.0107, 0.0702],
[-0.0659, -0.0408, 0.1083],
[-0.3007, 0.0121, -0.0574]],
[[-0.0855, -0.1076, -0.1168],
[-0.0080, 0.0864, -0.0253],
[-0.0258, -0.1281, -0.0047]]],
[[[-0.1291, 0.1212, 0.2008],
[-0.1482, -0.1511, -0.0130],
[-0.1573, 0.0297, -0.0405]],
[[-0.0669, 0.0550, -0.0618],
[-0.1164, -0.0760, -0.0789],
[-0.0407, 0.0310, 0.1849]],
[[-0.1014, -0.0505, 0.0498],
[-0.1507, -0.1117, -0.0990],
[ 0.0080, -0.1097, -0.0615]],
...,
[[-0.0681, -0.2296, 0.1364],
[-0.0906, 0.0033, 0.0265],
[ 0.2778, 0.0405, 0.0623]],
[[ 0.1557, 0.0296, 0.0163],
[ 0.0006, 0.0303, -0.0502],
[-0.0750, -0.1117, -0.0273]],
[[-0.0164, -0.0540, 0.0020],
[-0.1242, -0.0027, -0.0337],
[-0.0399, 0.0165, -0.0725]]]], device='cuda:0')),
('conv_layers.2.bias',
tensor([ 0.1042, -0.1738, 0.0574, 0.0903, 0.0392, 0.0278, 0.3411, -0.1872,
-0.0877, 0.1247, 0.0307, 0.2336, 0.1198, 0.0297, 0.3911, 0.0733],
device='cuda:0')),
('conv_layers.5.weight',
tensor([[[[-4.7030e-02, -3.6367e-02, 8.9099e-02],
[-1.4464e-01, 8.3180e-02, 2.3603e-02],
[-2.7073e-01, -4.3660e-02, 1.4746e-01]],
[[-9.5810e-03, -6.3265e-02, -7.9249e-02],
[ 1.6250e-01, 1.3058e-01, 1.5185e-01],
[-1.0803e-01, -5.4190e-03, -3.1853e-02]],
[[ 3.3344e-02, 7.2252e-02, -2.4871e-01],
[-1.2570e-01, -2.0483e-01, -2.7122e-01],
[-1.2020e-01, -1.1796e-01, -1.6824e-01]],
...,
[[-1.8383e-02, 1.1176e-02, -1.7290e-01],
[-2.2465e-02, -7.5028e-02, -1.6513e-01],
[-1.6445e-01, -2.1756e-02, -1.1041e-01]],
[[-2.2118e-01, -1.7520e-01, 3.9316e-02],
[-9.3812e-02, -3.2787e-02, -2.3193e-02],
[-2.0678e-02, 1.6090e-01, 7.5263e-02]],
[[-5.5202e-02, 1.3115e-02, 2.4063e-01],
[-1.0364e-01, 1.3410e-01, 1.2165e-01],
[-9.6798e-02, 1.3772e-01, 2.2150e-01]]],
[[[-2.0237e-01, -3.2885e-01, -2.8030e-01],
[-1.2410e-01, -5.8700e-02, -5.3070e-02],
[-7.5027e-02, -2.0686e-01, -2.0435e-01]],
[[-4.2667e-02, 1.4850e-02, 4.1019e-02],
[-1.1324e-01, 3.4614e-02, -5.7245e-02],
[-1.6816e-01, -4.8411e-02, 7.1340e-02]],
[[-2.6383e-02, -1.0157e-01, -3.0806e-02],
[-1.7933e-01, -2.0048e-01, 1.2460e-01],
[-1.3200e-01, -5.6100e-02, 8.5593e-02]],
...,
[[-2.2688e-01, -8.0259e-02, -1.1414e-01],
[-1.1277e-01, -8.3798e-02, 1.7388e-01],
[-8.8173e-02, -8.2295e-02, 4.6153e-02]],
[[ 6.4484e-02, 8.1466e-02, 8.3050e-02],
[-2.3830e-02, 7.9335e-02, 1.5850e-01],
[ 2.6813e-03, 8.3955e-02, 5.5241e-02]],
[[ 3.8640e-04, -1.3516e-01, -1.1808e-01],
[ 5.4824e-02, -3.3826e-02, 1.1190e-02],
[-2.3826e-02, -7.3966e-02, -3.8884e-02]]],
[[[-4.3312e-02, -4.4430e-02, -2.4337e-02],
[-4.0017e-02, 5.6757e-02, -9.7253e-03],
[-7.9406e-02, -9.3139e-02, -6.1536e-02]],
[[ 3.9331e-02, -8.5690e-02, -9.0449e-02],
[ 4.3204e-02, -2.7504e-02, -2.1959e-02],
[-4.5773e-03, -8.5475e-02, 6.4375e-02]],
[[-7.9247e-02, -5.3575e-02, -8.8917e-02],
[-9.3003e-02, -1.1545e-02, -1.9939e-02],
[-2.1830e-02, -7.3200e-02, -5.5253e-02]],
...,
[[-8.7061e-02, -2.4786e-02, -1.6610e-02],
[-1.6420e-02, -1.0457e-01, -8.7883e-02],
[ 4.8648e-02, 3.5980e-02, -2.7588e-02]],
[[-3.5195e-02, -8.4780e-02, -1.5277e-02],
[ 2.8956e-02, 4.4772e-02, 2.1444e-02],
[ 2.2371e-03, 1.6581e-02, -8.4407e-02]],
[[-9.6186e-02, -5.9021e-02, 1.7208e-02],
[-4.0200e-02, -1.3014e-01, -1.9526e-02],
[ 2.6069e-02, 1.5292e-02, 5.6182e-03]]],
...,
[[[ 6.0325e-03, -7.9205e-02, -4.0931e-02],
[ 1.5316e-01, -1.0194e-01, -1.3400e-02],
[-1.6575e-04, -5.8222e-02, -3.9488e-02]],
[[-2.9887e-01, 2.6687e-02, -1.1059e-01],
[-1.8601e-01, 1.9512e-02, 1.1191e-02],
[-3.2236e-02, -1.2984e-02, 3.4036e-02]],
[[ 1.1181e-01, 5.3773e-02, -5.1682e-02],
[ 6.6411e-02, -1.5198e-01, 1.2188e-01],
[ 1.2369e-01, -1.3581e-01, 6.3276e-02]],
...,
[[ 1.4406e-01, -1.5961e-01, 3.8365e-02],
[ 1.4345e-01, -3.6647e-01, 7.3236e-02],
[-4.3458e-03, -6.2717e-02, 1.0923e-01]],
[[-2.1168e-01, -9.2100e-02, 5.8451e-02],
[-2.0508e-01, -2.6552e-02, 2.3333e-01],
[-1.4977e-01, 1.1953e-01, 2.5656e-01]],
[[ 3.6001e-02, -6.3785e-02, 5.6530e-03],
[ 1.3454e-02, 5.0647e-03, -5.9277e-02],
[ 1.9583e-02, 3.2092e-02, -4.4262e-02]]],
[[[-9.2988e-02, -2.5347e-01, -1.6597e-01],
[ 1.3102e-01, 5.7345e-02, -1.7439e-01],
[-3.9005e-02, -6.8647e-02, -1.7591e-01]],
[[-3.8740e-02, -6.5974e-02, -1.4275e-01],
[-1.5237e-02, 2.7031e-01, 8.0366e-02],
[-1.6345e-01, -6.8199e-02, -1.0544e-02]],
[[-6.0445e-02, 1.0119e-01, -2.1141e-02],
[-3.4609e-02, -7.1899e-02, -4.4866e-02],
[-2.6020e-03, -2.9400e-01, -8.9325e-03]],
...,
[[-2.1661e-03, 2.4133e-01, 9.6310e-02],
[ 8.1491e-02, -5.3824e-02, 5.8017e-02],
[ 1.1051e-01, -7.2774e-02, -4.3239e-02]],
[[-8.4466e-04, 1.9872e-02, -1.9164e-02],
[-4.0939e-02, -2.4496e-01, 6.9606e-03],
[ 8.4647e-02, 5.9431e-02, 4.3189e-02]],
[[-1.9576e-01, -2.8479e-01, -6.7556e-02],
[ 1.0308e-01, -6.3688e-02, -2.8762e-01],
[-1.2060e-01, -1.8001e-01, -3.8286e-01]]],
[[[ 7.1438e-02, -1.2422e-02, -7.2766e-04],
[ 9.4723e-02, 1.0175e-01, -3.5353e-03],
[-4.4114e-03, -2.2651e-04, 1.1817e-01]],
[[-2.4475e-01, -1.5695e-01, -1.4183e-01],
[-6.1665e-02, 8.6170e-02, 7.9943e-02],
[-2.0695e-01, 2.9464e-02, -5.4591e-02]],
[[ 3.8366e-02, 1.7972e-01, 7.5168e-02],
[-1.1113e-02, 1.1190e-01, -1.2572e-01],
[ 2.5581e-02, -7.4311e-02, 4.8680e-02]],
...,
[[ 4.2125e-02, 1.8082e-01, 1.1838e-01],
[ 3.5022e-02, -4.1830e-02, -5.7385e-02],
[-3.3644e-02, -7.4181e-02, 8.9787e-02]],
[[ 4.6581e-02, -8.4575e-03, -1.1224e-01],
[-5.6654e-02, -2.7147e-01, -1.8138e-01],
[-9.3615e-02, -1.1461e-01, -3.4755e-02]],
[[-1.6206e-01, -1.1957e-01, -3.7639e-02],
[-1.4028e-01, 4.0032e-02, 7.3189e-02],
[-6.4381e-02, 2.6821e-02, 1.1297e-01]]]], device='cuda:0')),
('conv_layers.5.bias',
tensor([ 0.2024, 0.0660, -0.1085, 0.1295, 0.1567, 0.0949, 0.0770, 0.1637,
-0.0146, 0.2334, 0.1670, -0.1113, 0.0821, 0.1620, 0.1248, 0.0618,
0.0417, -0.3643, 0.1679, -0.0555, -0.1273, 0.1941, -0.0098, 0.1652,
-0.0959, -0.0150, 0.2465, 0.1448, -0.0613, 0.1252, 0.1976, 0.1327],
device='cuda:0')),
('conv_layers.7.weight',
tensor([[[[ 6.9009e-02, 1.1462e-01, 1.9725e-01],
[ 1.0977e-01, 9.0513e-02, 1.8843e-01],
[ 4.3901e-04, -2.4942e-03, 6.3833e-02]],
[[-8.8804e-02, -1.5081e-01, -4.5269e-01],
[ 1.5612e-01, 1.1230e-01, -2.2262e-02],
[ 1.3227e-03, 5.9894e-02, -1.5201e-02]],
[[ 1.8621e-02, 1.1661e-02, 1.1658e-02],
[-4.9895e-02, 2.1308e-02, 3.9631e-02],
[ 2.0269e-02, 2.5563e-02, -2.3864e-02]],
...,
[[-1.4599e-02, -2.9266e-01, -1.4289e-02],
[ 3.9900e-02, -6.3848e-02, -1.2033e-01],
[-5.1922e-03, 1.5117e-01, -1.5972e-01]],
[[ 7.8635e-02, -8.2026e-02, -1.1734e-01],
[ 1.4080e-01, -7.2381e-02, -2.0061e-01],
[ 2.4453e-03, 6.1407e-02, -1.1803e-01]],
[[-2.7402e-02, 1.1827e-02, -6.3272e-02],
[ 6.0142e-02, -5.2077e-02, 2.4844e-03],
[ 9.1823e-03, -1.5066e-02, 2.3948e-02]]],
[[[ 2.2598e-02, 2.4878e-04, 1.4044e-04],
[ 1.0791e-02, -1.4203e-02, 9.5458e-03],
[-6.7344e-02, 1.9074e-02, -1.4695e-02]],
[[-1.0408e-01, 2.5880e-02, -5.7053e-03],
[-8.0816e-02, -2.3992e-02, 6.0407e-02],
[-9.8482e-02, -3.8661e-02, 1.7996e-02]],
[[ 4.9898e-02, -4.8001e-02, 3.2605e-02],
[-5.6019e-02, -2.2292e-02, -3.8876e-02],
[-4.2964e-02, -1.9258e-02, 3.9710e-02]],
...,
[[-8.1141e-02, -8.9312e-02, -1.5980e-01],
[-9.3417e-02, 2.1325e-02, -1.4483e-01],
[-1.0141e-02, 3.9903e-02, -7.0688e-02]],
[[ 2.3244e-02, 3.6512e-02, -1.6180e-02],
[-3.0178e-02, -4.9807e-02, -6.8048e-03],
[-4.4457e-02, -8.0419e-02, -1.6279e-03]],
[[-1.0351e-02, -8.2821e-02, 1.9822e-03],
[-2.6571e-02, 1.0833e-02, -5.7805e-02],
[-4.5659e-02, -3.3213e-02, -3.1356e-02]]],
[[[ 9.2622e-02, 4.1946e-02, -6.6501e-02],
[ 1.0914e-01, 8.5823e-02, 6.1521e-02],
[ 1.2696e-01, 1.0262e-01, -1.0052e-01]],
[[-1.8726e-01, -6.0102e-02, 6.5399e-02],
[-5.3150e-02, -1.6732e-02, 4.3678e-02],
[ 7.0788e-02, 1.5275e-01, 2.0741e-01]],
[[-4.5740e-02, -6.1042e-02, 2.1611e-02],
[-3.2623e-02, -3.7276e-02, 5.0580e-02],
[-4.7336e-03, 4.9157e-02, 3.8870e-02]],
...,
[[-8.5918e-02, -4.3365e-02, -1.9855e-02],
[-6.2975e-02, -2.2503e-01, -1.5161e-02],
[-1.0766e-02, -6.5048e-02, -6.9223e-02]],
[[-2.0657e-01, -1.0011e-01, -3.4953e-03],
[-1.3175e-01, -7.3037e-02, -5.0063e-03],
[ 1.6329e-01, 1.1291e-01, -5.3531e-02]],
[[-1.8363e-02, -6.3703e-02, -1.6270e-01],
[ 4.9157e-02, 4.3332e-02, -1.0913e-01],
[ 8.4306e-02, 3.3354e-02, -3.7736e-02]]],
...,
[[[-2.6563e-01, -2.0163e-01, -5.7454e-02],
[-7.6050e-02, -9.1068e-02, -1.3764e-01],
[ 1.2855e-01, 8.0251e-02, -4.2795e-02]],
[[-2.7890e-02, 9.6183e-02, 7.9747e-02],
[-7.2707e-02, -8.4313e-03, 5.2336e-02],
[ 2.6887e-02, 5.7295e-02, 1.7410e-01]],
[[-8.9221e-03, -6.3043e-02, -9.3731e-03],
[-5.9503e-02, 3.3762e-02, -1.7429e-03],
[ 5.9601e-03, 2.6769e-02, -2.7117e-02]],
...,
[[-6.2440e-03, 6.0522e-02, 3.5079e-02],
[-6.6597e-02, -1.6694e-01, -6.6943e-02],
[ 1.7273e-02, 9.6227e-02, 5.9893e-02]],
[[-2.2900e-01, -2.7033e-01, -1.7290e-01],
[-1.1387e-02, -1.1532e-01, -6.8983e-02],
[ 1.4771e-01, 9.0543e-02, 1.2197e-01]],
[[-2.0081e-01, 1.8956e-02, -8.6765e-02],
[ 4.5397e-02, -1.8921e-03, -6.1285e-02],
[ 1.7161e-01, 1.5516e-01, 3.3201e-02]]],
[[[ 2.5673e-02, -9.8468e-02, 3.1704e-02],
[ 4.8893e-02, -3.5880e-02, -5.8408e-02],
[ 1.2059e-01, 4.7779e-02, -7.4807e-02]],
[[ 1.7772e-01, 9.7715e-02, 1.9568e-01],
[ 4.1162e-02, 1.0342e-01, 1.0738e-01],
[ 3.0417e-02, 8.3514e-02, 1.8919e-01]],
[[ 2.5829e-02, -1.8185e-02, 4.6962e-02],
[ 1.1599e-02, -4.9675e-02, -4.6516e-02],
[-7.7226e-02, -1.2456e-02, -2.0423e-02]],
...,
[[-8.8589e-02, 3.3292e-02, 1.5766e-01],
[-1.4954e-01, -5.1903e-02, 1.1310e-01],
[-8.2925e-02, -3.8674e-02, 1.1089e-01]],
[[-8.9427e-02, -6.2615e-02, -5.7916e-03],
[-7.3082e-02, -4.6811e-02, 4.6312e-02],
[ 4.5401e-02, 3.2886e-02, 4.5905e-02]],
[[-4.6811e-02, -6.6842e-02, 1.4480e-01],
[ 2.1874e-02, 1.7733e-02, 9.8101e-02],
[-6.2547e-02, -6.7476e-02, 2.9218e-02]]],
[[[-4.1696e-02, 4.2480e-02, -2.2099e-02],
[-6.6569e-02, -3.0954e-03, -3.6130e-02],
[-6.5596e-02, -2.2826e-02, -1.0680e-01]],
[[ 1.0331e-03, 3.8033e-02, -2.4720e-03],
[-6.2029e-03, -1.8558e-02, 2.9589e-03],
[ 2.2983e-02, 6.0779e-02, 1.3078e-02]],
[[-1.1889e-02, -6.1492e-02, 3.1117e-02],
[ 6.0491e-02, -6.2714e-02, 4.6257e-02],
[ 2.5172e-02, -6.5377e-02, 2.5211e-02]],
...,
[[-6.5732e-02, -5.8456e-02, -5.7695e-02],
[-2.6122e-02, -6.4552e-02, 7.9079e-04],
[ 1.0546e-02, -6.2628e-02, -2.1226e-02]],
[[-5.5431e-02, 2.7891e-02, -3.5145e-02],
[ 1.2740e-02, -5.8356e-02, -3.0334e-02],
[-1.3814e-02, -4.4704e-02, -7.3020e-04]],
[[-4.8293e-02, 8.0770e-03, 1.6076e-02],
[-2.5894e-02, -2.1912e-02, -8.1653e-02],
[-2.9177e-02, -1.1550e-02, -1.1512e-02]]]], device='cuda:0')),
('conv_layers.7.bias',
tensor([ 0.1028, -0.0959, -0.0420, 0.0215, -0.0041, 0.1933, 0.1869, 0.0852,
0.0424, -0.0728, 0.0133, 0.0512, 0.0846, -0.1801, 0.1564, 0.0151,
0.0018, 0.0415, 0.1906, 0.1596, -0.0055, -0.0205, 0.1077, 0.2279,
0.1694, 0.0037, -0.0751, -0.0745, 0.1922, 0.0815, -0.0861, -0.0537],
device='cuda:0')),
('conv_layers.10.weight',
tensor([[[[-0.0268, -0.0785, 0.1224],
[-0.0990, -0.1087, -0.0833],
[ 0.1437, 0.1913, -0.0249]],
[[ 0.0402, 0.1333, -0.0436],
[-0.0054, 0.0275, 0.0833],
[ 0.0213, -0.0444, -0.1093]],
[[ 0.2274, 0.0954, 0.2295],
[ 0.1305, -0.0850, 0.0288],
[ 0.0340, 0.1453, 0.0259]],
...,
[[ 0.3021, 0.1370, -0.0708],
[ 0.2373, 0.0412, -0.0312],
[ 0.0629, -0.1243, -0.0978]],
[[-0.0887, 0.0858, 0.0271],
[ 0.0204, -0.0706, -0.1943],
[ 0.1524, 0.0188, -0.0989]],
[[-0.0209, 0.0499, 0.0305],
[ 0.0397, 0.0153, 0.0371],
[ 0.0835, -0.0248, 0.0354]]],
[[[-0.0833, -0.0226, -0.0380],
[ 0.0932, 0.0370, -0.0093],
[ 0.0938, 0.1039, -0.0066]],
[[ 0.0143, -0.0126, -0.0554],
[ 0.0681, 0.0764, 0.0493],
[ 0.0591, 0.0679, 0.0526]],
[[-0.4101, -0.1496, 0.0674],
[-0.0286, 0.0542, 0.1192],
[ 0.1012, -0.0386, -0.2620]],
...,
[[ 0.0277, 0.1051, -0.1168],
[ 0.2385, 0.1591, -0.0690],
[ 0.1125, 0.0827, -0.2010]],
[[ 0.0099, 0.0593, -0.0054],
[-0.0344, 0.0983, -0.1356],
[ 0.0462, 0.0331, -0.0300]],
[[ 0.0415, 0.0317, -0.0360],
[ 0.0674, 0.0765, 0.0376],
[ 0.0411, 0.0711, -0.0324]]],
[[[ 0.0621, -0.0828, -0.1253],
[ 0.0334, -0.1126, 0.0087],
[-0.1496, -0.2244, 0.1080]],
[[ 0.0243, -0.0277, -0.0246],
[-0.0276, 0.0339, 0.0108],
[ 0.0073, 0.0206, 0.0672]],
[[-0.0384, -0.1288, -0.0677],
[-0.0756, -0.1732, -0.1043],
[-0.1420, -0.1997, 0.0533]],
...,
[[ 0.1855, 0.0507, -0.0565],
[ 0.1828, -0.1173, -0.3060],
[ 0.1039, -0.2304, -0.2713]],
[[-0.1228, -0.0164, -0.0152],
[-0.1267, -0.0944, -0.0638],
[-0.2069, -0.1083, 0.0570]],
[[-0.0126, 0.0067, -0.0088],
[ 0.0656, 0.0054, -0.0330],
[-0.0551, 0.0023, 0.0384]]],
...,
[[[-0.1849, 0.0078, -0.0043],
[-0.1367, 0.1726, 0.1515],
[-0.0572, 0.0714, 0.1246]],
[[ 0.0357, 0.0344, -0.0013],
[ 0.0162, 0.0700, 0.0339],
[ 0.0161, -0.0111, -0.0289]],
[[-0.2430, -0.1622, 0.0225],
[ 0.0290, 0.0119, 0.0070],
[ 0.0822, -0.1112, -0.1278]],
...,
[[-0.1592, -0.0278, -0.2067],
[ 0.1225, 0.0084, -0.1866],
[ 0.2418, 0.0807, -0.0442]],
[[ 0.1283, 0.0776, -0.1201],
[ 0.0849, 0.1296, -0.0687],
[-0.0369, -0.0395, -0.0447]],
[[ 0.0072, -0.0290, 0.0019],
[-0.0477, 0.0456, 0.0466],
[ 0.0120, 0.0572, -0.0614]]],
[[[ 0.1043, 0.0068, -0.1981],
[ 0.0623, 0.0200, -0.1307],
[ 0.0320, 0.0998, -0.0400]],
[[ 0.0867, 0.0595, 0.0684],
[ 0.0155, -0.0367, -0.0764],
[ 0.0077, -0.0725, 0.0708]],
[[-0.0093, 0.0486, 0.0699],
[-0.1316, -0.0262, 0.1589],
[-0.1874, -0.1135, 0.1413]],
...,
[[-0.0724, 0.1626, 0.0059],
[-0.0534, 0.0273, 0.0461],
[-0.1126, -0.0187, -0.0411]],
[[-0.0563, -0.0394, 0.0233],
[-0.1509, -0.1927, 0.1323],
[-0.1104, -0.0986, 0.0575]],
[[-0.0343, -0.0390, -0.0322],
[-0.0215, -0.0141, -0.0624],
[-0.0236, -0.0177, 0.0239]]],
[[[ 0.0520, 0.0876, -0.0294],
[ 0.1184, 0.1894, 0.1214],
[-0.0744, 0.1189, 0.1215]],
[[ 0.0071, 0.1144, 0.0660],
[-0.0189, 0.0268, 0.0872],
[ 0.0113, -0.0471, -0.0182]],
[[-0.0030, 0.0214, -0.0771],
[-0.1429, 0.0364, -0.0281],
[-0.0403, 0.0063, 0.0495]],
...,
[[ 0.1176, -0.1187, -0.1182],
[-0.0966, -0.2929, -0.1685],
[ 0.1257, -0.1227, 0.0128]],
[[-0.1392, -0.2439, -0.1981],
[ 0.1200, 0.0282, -0.0170],
[-0.0734, -0.0202, 0.0223]],
[[ 0.0047, 0.0079, -0.0127],
[-0.0476, 0.0227, -0.0787],
[ 0.0046, -0.0079, -0.0624]]]], device='cuda:0')),
('conv_layers.10.bias',
tensor([-0.0876, -0.0237, 0.0853, 0.1023, 0.2202, 0.1032, -0.0716, -0.2067,
-0.1237, 0.1519, -0.1150, 0.1363, 0.2626, 0.0713, 0.0382, -0.0170,
0.2217, 0.1144, 0.1320, 0.0928, 0.1682, 0.0652, 0.1499, -0.0967,
-0.1216, -0.0739, 0.2121, -0.0713, 0.2535, 0.0720, -0.0723, 0.1187],
device='cuda:0')),
('conv_layers.12.weight',
tensor([[[[ 1.5750e-02, -1.8802e-02, -1.0151e-01],
[ 3.1865e-03, -1.4502e-01, -5.3737e-02],
[ 1.1120e-01, 2.3159e-01, 1.9758e-01]],
[[-9.6277e-02, -1.3546e-02, -1.6304e-02],
[-1.7757e-01, -2.1907e-02, -1.0722e-01],
[-2.2258e-02, 1.6865e-02, 1.1611e-01]],
[[ 1.0469e-02, -1.9607e-01, -1.4850e-01],
[-1.8689e-02, -1.6843e-01, 3.4829e-02],
[-2.8844e-02, -6.8170e-03, 4.2777e-02]],
...,
[[-1.0267e-01, 7.0981e-02, 5.7194e-03],
[-3.9713e-02, 2.8897e-02, 7.6243e-02],
[ 1.3284e-01, 7.9973e-02, -3.0974e-02]],
[[-1.1654e-01, 1.8126e-02, 1.0699e-01],
[-1.1617e-01, -4.5680e-02, -2.0778e-02],
[-6.6526e-02, -8.3618e-02, 3.0532e-02]],
[[-1.1270e-01, -1.3082e-01, 1.6734e-01],
[-3.1644e-01, -1.0295e-01, 1.6494e-01],
[-8.7194e-04, 1.2088e-01, 4.6809e-02]]],
[[[-9.4082e-02, -2.6336e-02, 9.0989e-02],
[-1.0235e-01, 1.0079e-01, 1.2587e-01],
[-1.5505e-01, 3.2032e-02, 6.9001e-02]],
[[-7.6306e-02, -5.3762e-02, 1.1543e-01],
[-3.0904e-02, 5.6640e-02, 5.8483e-02],
[ 7.4004e-02, 3.8447e-02, 1.0649e-01]],
[[ 1.2385e-01, -1.2264e-01, -1.1571e-01],
[ 3.5694e-02, 4.4629e-03, -1.4114e-01],
[-1.6586e-02, -1.3506e-01, -4.6718e-02]],
...,
[[ 1.6005e-01, 8.6114e-02, 1.7866e-01],
[ 1.1557e-02, 3.0728e-02, 2.0408e-01],
[ 4.8107e-02, -6.4240e-03, 7.5953e-02]],
[[ 6.9605e-02, -5.3238e-02, -1.4370e-01],
[ 1.1002e-01, 7.5640e-02, 1.3693e-02],
[ 6.7915e-02, 1.0974e-02, 8.0762e-02]],
[[-1.8051e-01, -1.5862e-01, -6.9860e-02],
[-1.1071e-01, -1.9900e-01, -1.2483e-01],
[ 7.5649e-03, -9.8222e-02, -6.3080e-02]]],
[[[ 9.9826e-02, -1.2355e-02, -1.8245e-01],
[ 8.7192e-02, 1.0602e-01, -2.2730e-01],
[ 1.1862e-01, 2.7324e-02, -4.8670e-01]],
[[ 6.9264e-02, -2.0319e-01, 4.6244e-02],
[ 3.4824e-02, -8.4152e-02, -1.8886e-01],
[ 2.0105e-01, -6.7456e-03, -3.5289e-01]],
[[-7.1351e-02, -1.2104e-01, -1.2884e-01],
[-7.1407e-03, -8.7524e-03, -8.8229e-02],
[ 8.1751e-02, 1.1625e-01, 2.6605e-02]],
...,
[[-4.9645e-02, -1.1872e-01, -4.1919e-03],
[-5.0326e-02, -1.1282e-01, -3.5107e-02],
[ 1.1888e-01, 5.2949e-02, 7.6544e-02]],
[[-1.0234e-01, -3.1507e-02, 2.1784e-02],
[-1.3511e-01, 4.3913e-03, 6.0062e-02],
[-1.6103e-01, 5.2468e-03, -2.3413e-02]],
[[-4.5564e-02, -1.4474e-01, -1.2457e-01],
[ 3.5944e-02, 2.5066e-02, 1.2988e-02],
[ 1.3099e-01, 1.2616e-01, 2.2058e-01]]],
...,
[[[-2.2477e-01, 2.2143e-01, -6.9398e-02],
[-1.3874e-01, -7.4640e-02, -1.5825e-01],
[-1.5588e-02, -1.3330e-01, 1.2538e-01]],
[[-1.7911e-01, 6.0851e-02, 4.9589e-02],
[-1.1099e-01, -1.2606e-01, -1.1246e-01],
[-2.8315e-02, -9.7025e-02, 4.0198e-02]],
[[-2.6796e-01, -1.5530e-01, 1.1485e-01],
[ 1.0755e-01, -6.6851e-03, -1.1502e-02],
[ 1.1683e-01, -1.4514e-01, -1.1253e-01]],
...,
[[ 1.5992e-02, -1.9510e-01, -1.3435e-01],
[ 1.5778e-01, 3.4936e-02, -1.3136e-01],
[ 1.3228e-01, -4.6336e-02, -1.2755e-01]],
[[-4.6308e-02, -5.3111e-02, 1.4411e-01],
[-2.7537e-02, -9.7336e-02, 3.4823e-02],
[ 6.1922e-02, 3.8614e-02, 8.0130e-02]],
[[ 1.3406e-01, -3.7805e-03, -1.4912e-01],
[ 2.1447e-01, 1.3587e-01, -1.2820e-01],
[ 1.6850e-01, 3.0535e-02, -2.6715e-01]]],
[[[ 7.8183e-02, 1.5728e-01, -4.0903e-02],
[ 1.1511e-01, -1.0044e-01, -2.3213e-01],
[-7.5702e-02, -1.0027e-01, 5.5332e-02]],
[[ 1.4051e-01, 1.7742e-01, -8.3843e-02],
[-2.5493e-02, -3.7718e-02, -2.1883e-01],
[ 1.2879e-02, 6.1207e-02, -8.0445e-03]],
[[ 3.0396e-02, 7.8809e-02, 7.0814e-02],
[ 2.3849e-02, 3.2792e-03, -1.4557e-02],
[-8.3502e-02, -8.4839e-02, -1.6589e-01]],
...,
[[ 1.0456e-01, 6.5999e-02, 1.1264e-02],
[ 4.8753e-02, -2.7002e-02, -1.0605e-02],
[ 7.6261e-02, -3.9704e-02, -9.1257e-05]],
[[ 9.1233e-02, 7.5512e-02, -4.8948e-02],
[ 2.3229e-02, 5.4622e-02, 3.0258e-02],
[ 1.1454e-01, 7.3127e-02, 2.8436e-02]],
[[ 1.3214e-01, 2.1240e-01, 7.2187e-02],
[ 6.4263e-02, 4.8651e-02, -9.9604e-03],
[ 1.1715e-01, 9.8350e-02, 2.8916e-02]]],
[[[-1.0512e-01, -3.4200e-02, 1.3353e-01],
[-3.6263e-02, -3.2307e-02, 1.3730e-01],
[-1.1905e-02, -2.7923e-02, 6.9215e-03]],
[[-9.1355e-02, -1.6643e-01, -2.4538e-01],
[-5.9670e-02, -1.6845e-01, -1.1152e-01],
[-8.1712e-02, -1.4124e-01, 1.2262e-02]],
[[-2.0247e-01, -2.0532e-01, -4.5459e-02],
[ 1.3070e-02, 9.8757e-02, 1.6153e-01],
[ 1.0632e-01, 1.1582e-01, 4.5588e-02]],
...,
[[-7.4177e-02, -1.5520e-01, 9.4596e-02],
[-1.1714e-01, -1.6325e-01, 9.9501e-03],
[-6.9445e-02, -1.1002e-01, 7.7540e-02]],
[[ 1.2110e-01, -5.7913e-02, -4.7994e-02],
[ 1.3366e-02, -1.5410e-01, -7.3332e-02],
[-3.3940e-02, 2.5165e-02, 9.5372e-02]],
[[-3.2379e-02, -1.9575e-01, -2.4914e-01],
[-3.2206e-02, -1.3110e-01, 2.2402e-03],
[-2.3820e-01, -8.4464e-04, 1.3214e-01]]]], device='cuda:0')),
('conv_layers.12.bias',
tensor([ 0.0389, 0.1476, -0.0230, -0.0297, -0.1022, 0.0710, 0.0107, 0.0909,
0.0182, -0.1722, 0.2386, 0.0725, -0.0465, -0.0032, -0.0509, 0.1685,
-0.1314, 0.1986, -0.0520, 0.0977, 0.0624, -0.0470, 0.0545, 0.0449,
0.2857, 0.0663, -0.1563, 0.0916, 0.2890, 0.0104, 0.1193, -0.0377],
device='cuda:0')),
('fc_layers.0.weight',
tensor([[-9.4663e-02, 9.5497e-02, 3.1793e-02, ..., -2.4350e-02,
-6.7771e-02, -9.8752e-02],
[ 1.3137e-02, -2.3149e-02, 1.3668e-02, ..., 5.6907e-03,
-5.6442e-02, 1.6295e-02],
[-1.7471e-01, -2.5586e-03, 6.3889e-02, ..., -1.8111e-01,
-2.4564e-01, -5.2610e-02],
...,
[-6.2209e-02, 8.4102e-03, 3.9970e-04, ..., 1.4998e-02,
2.0363e-02, 1.5555e-02],
[-5.2813e-03, 1.5701e-02, -3.5947e-02, ..., 2.2849e-03,
-2.1385e-02, -9.0535e-05],
[-6.2392e-02, 3.7014e-03, -3.9616e-02, ..., -3.1742e-02,
-2.4267e-02, 1.7453e-02]], device='cuda:0')),
('fc_layers.0.bias',
tensor([-9.1414e-02, -8.8624e-02, 7.3776e-02, -2.3321e-02, -1.0116e-01,
1.2893e-01, 2.1958e-02, 4.9048e-03, -3.5985e-02, -4.6488e-02,
-3.5688e-02, -5.7674e-02, 3.3638e-02, -4.0000e-02, 7.9514e-03,
-2.7469e-02, -3.9972e-02, 1.4961e-01, 3.7115e-02, 1.0135e-02,
-4.2905e-02, 3.2251e-02, -2.1697e-02, -7.6372e-02, -4.8307e-03,
6.6540e-03, 3.0076e-02, -4.8077e-02, 1.4011e-01, -3.5937e-02,
1.1195e-01, -1.0829e-01, -2.6370e-02, 2.9505e-02, 1.4601e-01,
3.7112e-02, 1.7699e-02, 9.4877e-02, -7.8380e-02, 1.2336e-01,
6.9786e-02, 1.6557e-01, -4.0479e-02, 1.1718e-01, 6.4088e-02,
-4.1914e-02, -6.3180e-02, 1.7170e-01, 9.9393e-02, -1.7317e-02,
4.8475e-03, 2.6568e-02, 1.6790e-03, 1.7465e-01, -2.0560e-02,
-5.8284e-02, 1.0887e-02, -7.3413e-02, 5.8976e-02, -9.2862e-02,
-6.1907e-02, -5.3327e-02, 1.2203e-02, -1.7190e-02, -2.8950e-02,
-6.1919e-02, 8.5058e-03, -4.3873e-02, 9.1428e-02, 1.0059e-01,
2.2505e-01, 1.1579e-02, -1.1516e-02, -2.2798e-02, 3.3710e-05,
-8.8057e-02, -2.7881e-02, -3.1317e-02, -3.8823e-02, -7.7994e-02,
-4.4669e-02, -2.7030e-03, 1.6028e-01, 1.0444e-02, 5.1280e-02,
-5.0083e-02, 3.8864e-02, -3.1815e-02, 3.5173e-02, -4.3420e-02,
9.5220e-02, -1.0901e-01, 1.2081e-01, -4.7092e-02, -5.0987e-02,
7.9477e-03, -1.1311e-02, -5.7861e-03, 5.4357e-02, 3.0063e-03,
-1.0427e-02, -5.7481e-02, -4.7817e-02, -2.2958e-02, 7.7780e-02,
1.2979e-01, -3.3017e-02, 1.3307e-01, 1.0133e-02, 1.3848e-02,
8.2515e-02, -2.7850e-02, -1.4535e-02, -2.9807e-02, 9.2706e-02,
-4.6336e-02, 1.3011e-03, 1.1088e-02, -1.1319e-01, -7.1764e-02,
-5.8874e-03, 5.1797e-02, -3.8091e-03, 1.0716e-01, 1.8952e-01,
-1.4818e-02, 1.5193e-02, 4.8129e-02, -3.7256e-02, 1.8346e-02,
6.2535e-02, 6.9317e-02, -6.6003e-02, -2.2472e-02, 2.0358e-01,
9.5855e-02, -2.8719e-02, 2.1559e-02, 1.4117e-01, -3.4460e-02,
-2.7173e-02, 1.7401e-02, -6.6341e-02, 1.9295e-01, -1.2479e-01,
2.0093e-01, -7.8052e-02, -5.5412e-02, -7.0529e-02, 1.0303e-01,
-5.8742e-02, -5.1087e-03, -2.1735e-02, -7.4418e-03, -5.4824e-02,
-3.1869e-02, -9.2274e-02, 1.8646e-01, -6.0921e-02, -3.6070e-02,
-5.2206e-02, 1.2995e-01, 9.7558e-04, -3.4241e-02, -1.9872e-02,
-4.0055e-02, -8.7418e-02, 6.6316e-02, 1.5736e-01, -5.0828e-02,
2.2196e-01, -6.7428e-02, -5.3751e-03, 5.8274e-02, 2.4656e-01,
-4.8600e-02, -1.6606e-02, -4.1645e-03, -6.5163e-02, -1.0605e-02,
8.2341e-02, -6.0412e-02, 9.0210e-03, -4.2418e-03, 1.1914e-01,
-3.4105e-02, -4.1944e-02, 1.5560e-01, 9.0093e-02, 7.5693e-02,
6.3546e-02, -1.8087e-02, 8.9562e-02, 2.2900e-01, 4.7165e-03,
-8.8096e-02, -4.7713e-02, -3.4837e-02, 1.2233e-01, -4.3189e-02,
-1.3408e-02, -2.3065e-02, -4.4044e-03, -1.1149e-02, 3.1791e-02,
1.7384e-01, -5.1812e-02, 1.4904e-01, -2.5286e-02, -8.1529e-02,
-4.8444e-02, 4.7689e-03, 3.0993e-03, 3.9768e-03, 1.4270e-01,
-3.0248e-02, -6.5284e-02, -3.8894e-02, -3.2412e-02, 1.4487e-02,
-6.9987e-02, 2.6042e-02, -3.0099e-02, 2.0947e-02, 1.5522e-01,
-9.4173e-02, -1.3143e-02, -2.4128e-03, -3.9204e-02, 3.3634e-01,
-2.9417e-02, -2.9749e-02, 1.4871e-02, 1.8366e-01, -5.1646e-02,
-3.8814e-02, 1.0227e-02, -6.0535e-02, -2.9910e-02, 1.4457e-01,
-2.6880e-02, 7.8629e-02, 5.2561e-02, 4.1361e-02, -8.2938e-04,
-5.9014e-02, -2.0781e-02, 3.0886e-02, -1.1336e-02, 8.4235e-02,
1.1971e-01, -7.8338e-02, -1.6300e-02, -1.4978e-02, -6.5970e-03,
-4.5928e-02], device='cuda:0')),
('fc_layers.2.weight',
tensor([[ 0.0399, 0.0626, -0.0888, ..., 0.0377, -0.0223, -0.0102],
[ 0.1223, 0.0479, 0.0770, ..., 0.0170, 0.0180, 0.0178],
[ 0.0085, -0.0081, -0.0554, ..., 0.0068, 0.0426, 0.0188],
...,
[-0.0060, 0.0445, -0.1284, ..., -0.0074, 0.0528, -0.0187],
[-0.1314, 0.0722, 0.1353, ..., 0.0081, 0.0145, 0.0448],
[-0.0589, 0.0175, -0.1076, ..., -0.0135, -0.0082, -0.0508]],
device='cuda:0')),
('fc_layers.2.bias',
tensor([-0.0372, 0.0637, 0.1666, 0.0687, 0.0890, -0.0532, -0.1050, -0.1449,
-0.1009, -0.1428], device='cuda:0'))])},
{'ratio': 0.42,
'bias': 32,
'train_losses': [283.3235830661514,
236.67709525010142,
203.37381090335612,
186.30181293670628,
178.08212785196554,
170.8841159327076,
167.98599552712932,
164.66951065038512,
160.9251620380458,
159.46911404290955,
157.68119042123593,
155.53576891438917,
154.8015234977995,
152.91829794156405,
152.81556887651612],
'test_losses': [264.48681960386386,
216.65695395189172,
190.39157623403213,
178.29552667281206,
171.98474488538855,
167.42815616551567,
163.43403333776138,
161.21997627557494,
156.68799793486502,
156.08563019247617,
153.60338458827897,
152.69217450945985,
152.46044002794753,
150.9840357911353,
155.1455456976797],
'model_state_dict': OrderedDict([('conv_layers.0.weight',
tensor([[[[-0.0374, -0.0630, 0.0023],
[-0.1208, 0.0321, 0.2577],
[-0.1596, 0.0233, 0.0789]],
[[-0.1189, -0.0121, 0.2247],
[-0.0411, -0.0362, 0.1456],
[-0.3227, -0.0513, 0.1786]],
[[ 0.0351, -0.0560, 0.0253],
[-0.2322, -0.0759, 0.1472],
[-0.1101, 0.0627, 0.1935]]],
[[[-0.2399, -0.1127, -0.0747],
[ 0.1293, -0.1930, 0.0356],
[ 0.1789, 0.0589, 0.1546]],
[[-0.2144, -0.2552, -0.0693],
[-0.0209, -0.0357, 0.0772],
[ 0.2150, 0.0779, 0.2136]],
[[-0.0551, -0.1039, 0.0454],
[-0.0682, -0.0450, -0.0401],
[ 0.2008, 0.1572, -0.0730]]],
[[[-0.0414, -0.0346, -0.3274],
[ 0.2458, 0.1557, 0.0874],
[ 0.1777, -0.0734, -0.1870]],
[[ 0.2000, -0.1645, -0.1538],
[ 0.1832, -0.2020, -0.1092],
[ 0.3076, 0.0805, -0.1465]],
[[-0.0652, 0.0551, -0.0159],
[-0.0215, 0.0639, -0.2384],
[ 0.0736, 0.0758, 0.1102]]],
[[[-0.1581, 0.0906, -0.0056],
[ 0.1355, -0.1212, 0.1754],
[ 0.2119, 0.1359, -0.2258]],
[[ 0.0925, -0.0683, 0.1021],
[ 0.2238, 0.0511, -0.2365],
[ 0.0477, -0.0328, -0.1012]],
[[ 0.0737, -0.1097, -0.0276],
[-0.0621, -0.0878, -0.0983],
[ 0.2979, -0.0924, -0.2328]]],
[[[-0.0732, 0.0115, 0.0752],
[ 0.0592, 0.1632, 0.1477],
[-0.3222, -0.0767, 0.0453]],
[[-0.1214, 0.2202, -0.1283],
[ 0.1986, 0.3519, -0.0623],
[ 0.0256, -0.0042, -0.0699]],
[[-0.0427, -0.1566, 0.0646],
[-0.0297, 0.1270, -0.0379],
[-0.0358, -0.0773, -0.2061]]],
[[[-0.0833, -0.0076, 0.2150],
[-0.0065, -0.1255, -0.1425],
[ 0.0656, -0.0196, 0.1473]],
[[-0.0067, -0.0400, -0.0017],
[-0.2789, -0.2993, 0.0239],
[-0.0939, 0.0066, 0.1149]],
[[-0.0028, 0.0424, 0.1444],
[-0.0025, 0.1346, 0.1574],
[-0.0765, 0.1296, -0.0737]]],
[[[ 0.1198, 0.1534, -0.0166],
[-0.0004, -0.0210, 0.0758],
[ 0.0013, -0.0641, -0.2253]],
[[ 0.2364, 0.2899, 0.2610],
[-0.1287, -0.1993, -0.0253],
[-0.1777, -0.2484, -0.0850]],
[[ 0.1249, -0.0156, 0.0301],
[ 0.1639, -0.0601, 0.1051],
[-0.0695, -0.1111, -0.0647]]],
[[[-0.2237, -0.1399, 0.0947],
[ 0.1076, 0.0318, -0.0424],
[ 0.1184, 0.1630, 0.0508]],
[[-0.2695, -0.1308, 0.2555],
[-0.3494, -0.0093, 0.2095],
[ 0.0808, 0.1063, 0.0807]],
[[ 0.0346, 0.0460, -0.0348],
[-0.1859, -0.0579, -0.0067],
[-0.0420, 0.0067, 0.2583]]]], device='cuda:0')),
('conv_layers.0.bias',
tensor([ 0.2248, 0.2203, 0.1833, -0.4610, 0.2436, -0.1269, 0.2256, -0.3395],
device='cuda:0')),
('conv_layers.2.weight',
tensor([[[[ 6.2517e-02, 1.6312e-01, 6.8619e-02],
[ 1.0250e-01, 1.5446e-01, 1.6559e-01],
[-3.2527e-02, 9.7941e-02, 7.7318e-02]],
[[ 9.2348e-02, -1.6637e-02, -4.1096e-02],
[ 9.9725e-02, 5.7629e-02, -4.7659e-02],
[ 7.9085e-02, -9.7208e-02, -1.1190e-01]],
[[ 9.9810e-02, -8.6540e-02, -1.2761e-02],
[ 1.1574e-01, -2.4980e-01, -1.6891e-01],
[ 1.1139e-01, -1.5877e-01, -9.0772e-02]],
...,
[[-1.3980e-01, -2.0667e-01, -7.5681e-02],
[-9.4524e-02, 2.1651e-02, -2.8898e-02],
[-5.4399e-02, -2.0171e-01, -1.6159e-03]],
[[-6.5671e-02, -2.6893e-02, -1.0422e-01],
[ 6.2904e-02, -6.0046e-02, -8.1825e-02],
[-9.0429e-02, -2.0408e-02, 1.1383e-01]],
[[-1.0658e-01, -1.3347e-01, -1.3823e-01],
[-1.3087e-01, 1.6734e-02, 4.7224e-02],
[-1.4979e-01, -9.3753e-02, -1.7354e-01]]],
[[[-1.4661e-02, -4.6836e-02, 1.4043e-01],
[ 2.8962e-02, 3.3738e-03, -1.8827e-02],
[-6.0577e-03, -2.6305e-02, 2.6345e-02]],
[[ 1.3501e-01, 2.6512e-01, 1.2902e-01],
[ 7.7108e-03, 7.0096e-02, 1.3267e-01],
[-1.3181e-02, 1.6294e-01, 1.8590e-02]],
[[-5.9977e-02, -1.7944e-01, 9.2564e-02],
[ 1.0720e-01, -4.9210e-02, -3.1420e-02],
[ 2.1673e-02, 1.7192e-02, 1.2530e-01]],
...,
[[ 7.5039e-02, 1.0490e-01, -6.8445e-03],
[-1.4636e-01, -1.9593e-01, -2.7958e-02],
[-1.1363e-01, -2.3453e-02, -1.0205e-02]],
[[-1.6558e-01, -3.0842e-01, -2.6963e-01],
[-1.8154e-01, -3.0284e-01, -2.5014e-01],
[-7.3931e-02, -5.3349e-02, 1.1694e-01]],
[[ 4.2127e-02, -2.4390e-01, -8.2454e-02],
[-7.8559e-02, -2.5030e-01, -1.2915e-01],
[-3.6733e-02, -1.5432e-01, -1.6659e-01]]],
[[[ 2.3767e-01, 1.3912e-01, 8.8943e-02],
[ 1.9152e-01, 1.4901e-01, 1.1964e-01],
[ 1.8159e-03, -2.1800e-03, 4.3345e-02]],
[[ 7.9126e-02, -7.2152e-02, -7.6225e-02],
[-5.2814e-02, -2.7775e-01, -4.6605e-02],
[ 1.0387e-01, 3.9856e-02, -5.8115e-02]],
[[-2.4093e-01, -3.0393e-01, 9.2993e-02],
[-3.6242e-01, -3.1673e-01, -6.7500e-02],
[-1.5026e-01, -2.0928e-01, -3.6697e-03]],
...,
[[ 3.6465e-02, -2.7523e-02, -9.6472e-02],
[ 5.0287e-02, 1.5194e-02, -7.5509e-02],
[ 1.7414e-01, -2.1709e-02, 9.2979e-05]],
[[ 3.3904e-02, 7.5617e-03, 4.0962e-02],
[ 3.1233e-02, -9.6349e-02, -1.1341e-02],
[ 1.7951e-01, -5.2018e-02, 7.6427e-02]],
[[-1.2783e-01, -2.0080e-01, 1.0190e-01],
[-1.0519e-01, -1.9690e-01, -1.5036e-02],
[-1.7975e-01, -1.1157e-01, -9.1662e-02]]],
...,
[[[-1.3831e-01, -2.4445e-01, -1.0899e-01],
[ 1.9178e-02, -4.7342e-02, -8.3233e-02],
[ 2.2815e-03, -1.3661e-01, 6.8399e-02]],
[[ 1.2535e-01, -7.4764e-02, 9.0562e-02],
[-7.0113e-02, 4.3940e-02, -1.1586e-02],
[-2.5282e-02, 7.9998e-02, 1.8200e-02]],
[[-3.3782e-02, -2.6263e-02, 6.5147e-02],
[ 1.1079e-01, -3.7921e-02, 7.5528e-02],
[ 4.6621e-02, 1.0649e-01, 2.8167e-02]],
...,
[[ 7.4641e-02, 5.0653e-02, 4.2752e-02],
[-5.5892e-03, -2.6942e-02, 5.6441e-02],
[ 3.8819e-02, 4.9451e-02, -1.2463e-01]],
[[-1.1404e-01, -1.5110e-01, -3.6648e-02],
[-1.2181e-01, -4.4994e-02, 6.7837e-02],
[ 2.1792e-02, -1.2825e-01, -6.4887e-03]],
[[ 2.8379e-02, -3.2483e-02, 2.0134e-01],
[ 1.4808e-02, 4.2088e-03, 2.2445e-01],
[-1.9727e-02, -7.1084e-02, 9.0437e-02]]],
[[[ 7.9109e-02, -2.5874e-03, 2.8002e-02],
[-2.2465e-01, -4.5943e-02, -3.2918e-02],
[-1.1502e-02, -4.9750e-02, -6.8649e-02]],
[[-3.2077e-01, -3.3383e-01, -2.5381e-01],
[ 1.1852e-01, -6.5426e-02, -2.4273e-02],
[ 1.4508e-01, 9.7373e-02, 1.4000e-01]],
[[-8.4472e-02, -3.1312e-02, -2.0892e-01],
[ 4.3295e-03, 1.2600e-01, -1.1289e-01],
[ 5.6424e-02, 1.2475e-01, -5.4931e-02]],
...,
[[ 2.1013e-02, -8.9194e-02, 9.0633e-02],
[ 7.3571e-02, 8.2481e-02, -3.3641e-02],
[ 5.8530e-02, -5.6508e-02, 1.1858e-01]],
[[ 1.5412e-02, 7.4839e-03, 1.4769e-01],
[ 5.8651e-02, 1.4980e-01, 3.4941e-02],
[-7.7929e-02, -1.7174e-01, 5.5443e-02]],
[[-1.4191e-01, -2.1217e-02, -3.2066e-02],
[-1.0038e-01, -1.8880e-01, -1.9253e-01],
[-3.2408e-02, -2.0758e-01, -2.1639e-01]]],
[[[-7.4028e-02, -2.5105e-01, -6.5439e-02],
[-1.4828e-01, -2.7307e-01, -1.6084e-01],
[-6.3815e-02, -2.6414e-01, -3.5863e-02]],
[[ 2.0037e-02, 1.1769e-01, 4.1415e-02],
[-3.1133e-02, 6.1027e-02, 1.2240e-01],
[ 1.1734e-02, 1.4002e-01, 7.4535e-02]],
[[-5.1886e-02, 1.1003e-01, 8.3219e-02],
[-9.8891e-03, -2.2127e-02, 1.2714e-01],
[ 7.9625e-03, 1.8431e-01, 8.0425e-02]],
...,
[[ 1.3755e-01, 2.7165e-01, 2.0028e-01],
[-6.4393e-03, -3.7871e-02, 1.3211e-01],
[-1.6438e-02, -1.0716e-01, -3.1238e-02]],
[[-2.3876e-01, -2.0357e-01, 9.0453e-02],
[-3.3788e-01, -3.9009e-01, -2.2406e-01],
[-2.6260e-01, -1.5752e-01, -2.0374e-01]],
[[ 2.9589e-03, 2.7739e-01, 1.9378e-01],
[-7.5678e-02, -1.2546e-01, 1.1715e-01],
[ 1.2259e-01, 7.9994e-02, 7.2417e-02]]]], device='cuda:0')),
('conv_layers.2.bias',
tensor([ 0.1140, 0.2110, 0.0912, 0.2865, 0.0154, -0.0251, 0.1573, 0.1078,
0.2262, -0.0077, -0.1092, -0.4373, -0.0148, 0.0814, 0.1900, -0.2188],
device='cuda:0')),
('conv_layers.5.weight',
tensor([[[[-0.1421, 0.0787, 0.0244],
[-0.2759, 0.0849, 0.2214],
[-0.2342, -0.0239, 0.1523]],
[[ 0.1296, 0.0031, 0.0282],
[ 0.1103, -0.1166, -0.2138],
[-0.0851, -0.1004, -0.0596]],
[[-0.0112, 0.0662, 0.1574],
[-0.0748, 0.0073, 0.1678],
[-0.1450, -0.1321, 0.0850]],
...,
[[ 0.0427, -0.0241, -0.0246],
[ 0.0759, 0.1394, -0.1498],
[ 0.2160, 0.0455, -0.0961]],
[[ 0.1562, 0.0224, -0.0182],
[-0.0397, 0.1664, 0.1455],
[-0.1397, -0.1753, 0.0333]],
[[-0.1518, -0.0394, 0.2626],
[-0.0449, 0.0098, -0.1357],
[-0.2491, -0.0294, 0.1181]]],
[[[-0.0132, -0.1264, -0.0956],
[ 0.1213, 0.1057, -0.0763],
[ 0.1005, 0.1846, 0.0329]],
[[-0.0054, -0.0045, -0.1181],
[-0.1028, 0.0441, 0.0470],
[-0.0030, 0.0570, 0.0455]],
[[-0.1962, -0.1911, -0.1754],
[-0.0041, 0.1221, -0.0181],
[ 0.0293, 0.1887, 0.0766]],
...,
[[-0.0059, -0.1655, -0.1525],
[-0.0445, 0.1209, 0.1201],
[-0.0759, -0.0665, 0.0552]],
[[-0.2450, -0.2935, -0.2459],
[-0.0753, -0.1807, -0.2331],
[-0.0065, -0.0277, -0.0540]],
[[ 0.1603, -0.0576, -0.1096],
[ 0.1940, 0.0906, 0.0861],
[-0.0692, 0.1437, 0.0320]]],
[[[-0.0209, 0.1801, -0.0549],
[-0.1051, 0.1844, -0.1319],
[-0.0628, 0.2213, -0.0679]],
[[ 0.0391, 0.0468, -0.0879],
[ 0.0792, 0.1103, -0.0924],
[-0.0879, 0.1233, -0.1691]],
[[-0.0724, 0.0349, -0.2209],
[-0.0937, 0.0359, -0.1679],
[-0.2165, 0.0386, -0.1619]],
...,
[[-0.1004, -0.0093, -0.0040],
[ 0.0190, -0.0718, -0.1982],
[-0.0754, 0.0273, -0.0558]],
[[-0.0362, -0.0331, -0.1089],
[-0.0480, 0.0442, -0.0632],
[-0.0382, -0.0211, -0.0525]],
[[-0.0036, 0.1863, -0.0547],
[ 0.0300, 0.1571, -0.2079],
[ 0.0550, 0.1921, -0.0755]]],
...,
[[[-0.2068, -0.2282, -0.0381],
[-0.0366, -0.2884, 0.0677],
[ 0.0708, -0.1896, 0.0309]],
[[ 0.0009, 0.0576, 0.0266],
[-0.0239, -0.0297, 0.1710],
[ 0.0027, 0.0284, 0.1519]],
[[-0.1681, -0.1388, -0.0380],
[ 0.0318, -0.0674, -0.2080],
[ 0.2075, 0.1092, -0.1643]],
...,
[[-0.0498, -0.0333, 0.0678],
[ 0.0112, 0.0147, 0.1377],
[-0.0196, 0.0838, 0.0183]],
[[ 0.0280, -0.0135, -0.0743],
[ 0.0358, -0.0105, -0.0011],
[ 0.0108, -0.2292, -0.2370]],
[[ 0.0496, -0.0738, -0.0426],
[-0.1283, 0.0403, 0.0389],
[-0.1922, -0.0840, 0.0475]]],
[[[-0.0708, -0.2339, 0.0638],
[-0.0008, -0.1832, 0.0957],
[-0.0236, -0.1736, 0.1158]],
[[-0.0170, 0.0356, 0.1160],
[-0.1441, -0.0893, 0.0585],
[-0.1424, -0.1627, 0.0515]],
[[ 0.1374, -0.0964, -0.0612],
[ 0.2102, -0.1812, 0.0144],
[ 0.1363, -0.1555, 0.0072]],
...,
[[-0.0626, 0.0601, 0.1518],
[-0.0278, 0.0495, 0.1060],
[-0.0529, -0.0261, -0.0100]],
[[-0.0381, -0.1558, -0.0507],
[-0.0976, -0.0386, 0.0140],
[ 0.1386, 0.0623, 0.0262]],
[[-0.0496, -0.0831, -0.2292],
[ 0.0097, -0.2070, -0.1290],
[-0.0995, -0.0465, -0.0414]]],
[[[-0.0151, -0.2027, 0.0419],
[ 0.1440, -0.0802, -0.0299],
[-0.0482, -0.0344, -0.0187]],
[[ 0.0649, 0.1003, 0.0534],
[-0.0182, -0.0620, 0.0424],
[-0.0844, -0.0934, -0.0895]],
[[ 0.0056, -0.2361, 0.0637],
[ 0.0823, -0.0121, -0.0405],
[ 0.0285, 0.0332, 0.0678]],
...,
[[ 0.1041, -0.0041, -0.0324],
[-0.0660, 0.0562, 0.0862],
[ 0.0342, -0.0175, 0.0216]],
[[-0.0176, 0.1048, 0.1485],
[ 0.1332, 0.0034, -0.0782],
[ 0.0986, 0.0965, -0.1405]],
[[-0.0957, -0.1433, 0.0613],
[ 0.0362, -0.1624, 0.0802],
[ 0.3151, -0.0630, 0.0534]]]], device='cuda:0')),
('conv_layers.5.bias',
tensor([ 0.0669, -0.2354, 0.0406, 0.1709, 0.2321, 0.0873, -0.2867, 0.0093,
0.2183, 0.1163, 0.1452, -0.0360, 0.2080, -0.0318, 0.1267, -0.0151,
0.0475, 0.1459, 0.1380, -0.0694, -0.0496, 0.0934, -0.0956, 0.0038,
0.3013, 0.1857, -0.0405, -0.0835, 0.2227, 0.2037, 0.1348, 0.0647],
device='cuda:0')),
('conv_layers.7.weight',
tensor([[[[-6.0249e-02, -4.7680e-02, -4.9633e-02],
[-4.2409e-02, 1.1505e-02, -7.4067e-02],
[-1.5861e-02, -3.4246e-02, -5.3277e-02]],
[[ 3.3892e-02, 1.8368e-02, -5.6781e-02],
[ 9.4239e-03, -1.4611e-02, -5.5044e-02],
[-5.1969e-02, -3.9931e-02, 1.4336e-02]],
[[ 1.4180e-02, 8.2663e-02, 6.2874e-02],
[ 3.3552e-02, 3.3541e-02, -1.3169e-02],
[ 5.2495e-02, 1.2322e-02, -3.8741e-03]],
...,
[[-3.1956e-02, 1.7240e-02, -3.3978e-02],
[-7.5627e-02, -2.3902e-02, 3.5281e-02],
[-5.1511e-02, -1.0910e-02, -5.6766e-02]],
[[-3.0012e-02, -3.6020e-02, -3.1865e-02],
[ 1.5846e-02, -2.6054e-02, -9.7120e-02],
[-1.0954e-01, -3.0675e-02, 5.1296e-03]],
[[-5.8529e-02, -3.4336e-02, -4.3632e-03],
[-8.4689e-02, -6.8769e-02, 1.3912e-02],
[-3.3587e-02, -8.7321e-02, 1.2357e-02]]],
[[[-9.8787e-02, 1.7993e-02, 2.6032e-04],
[-1.4388e-02, 1.9527e-02, 1.9204e-02],
[-2.1058e-02, -7.4642e-02, -7.1565e-02]],
[[-3.4930e-02, -5.4371e-02, -1.6246e-02],
[-4.9779e-02, -1.9837e-02, -6.4319e-02],
[-6.8076e-02, -7.0359e-02, -7.9863e-03]],
[[ 8.9334e-03, 5.3734e-02, 4.4527e-02],
[-4.8741e-02, -1.7988e-02, -5.1995e-02],
[ 3.1868e-02, -4.5328e-02, 4.3909e-02]],
...,
[[-4.5884e-02, 2.8877e-02, -1.0192e-01],
[-9.2510e-03, -1.6060e-02, -9.2494e-02],
[-4.3427e-02, 5.5797e-02, 3.6034e-02]],
[[-1.4959e-02, 1.9608e-02, -9.4817e-02],
[ 4.1721e-02, -2.1941e-02, -8.6618e-02],
[ 4.0972e-03, -8.2234e-02, 4.2009e-02]],
[[-6.3717e-02, 1.0006e-02, -8.8828e-02],
[-4.2950e-02, -1.7893e-02, -1.3119e-01],
[-3.3457e-02, -1.3753e-02, 9.9283e-04]]],
[[[ 1.0513e-01, 9.2778e-02, -2.8567e-02],
[-6.0667e-02, 5.6298e-02, 1.2497e-01],
[-7.9337e-02, 1.0284e-01, 8.8662e-02]],
[[-1.7751e-01, 3.4660e-02, 2.6397e-01],
[-2.0336e-02, 1.7489e-02, -2.6172e-02],
[-5.6665e-02, 2.2845e-02, 3.2239e-02]],
[[ 8.7159e-02, 6.7568e-02, 3.6279e-02],
[ 3.8156e-02, 2.3175e-03, 1.0761e-01],
[-6.2513e-02, -1.1393e-01, 7.5307e-02]],
...,
[[ 8.5832e-02, -3.1332e-02, -1.9154e-01],
[ 4.6191e-02, 4.8180e-02, 1.5670e-01],
[ 9.8826e-03, 1.1468e-01, 1.7651e-01]],
[[ 1.4294e-02, -2.3152e-01, -4.0487e-01],
[ 8.8142e-02, -6.9739e-02, 9.9929e-04],
[ 2.1160e-03, 1.9166e-01, 1.3617e-01]],
[[ 4.3703e-02, 1.2358e-01, 2.5446e-01],
[ 1.7948e-02, 1.0332e-01, 1.0325e-01],
[-9.9844e-02, 9.6209e-02, -6.1282e-02]]],
...,
[[[-4.8150e-02, -6.5832e-02, -2.6601e-02],
[-6.6379e-03, -1.5049e-02, -4.0001e-02],
[ 2.5216e-02, 7.6716e-03, 1.3715e-02]],
[[ 5.4661e-02, 4.2794e-02, -2.4500e-02],
[ 3.6180e-02, -7.7438e-03, -6.7770e-03],
[ 1.0836e-02, -1.3885e-01, -3.7339e-02]],
[[ 4.3374e-03, -6.4037e-02, 4.5646e-02],
[-7.9242e-03, 2.8889e-02, -1.0769e-02],
[-6.2777e-03, -1.5435e-02, 5.5657e-02]],
...,
[[-7.9252e-02, -1.7169e-02, -9.5197e-02],
[-1.1340e-01, -5.0760e-02, 6.4206e-03],
[-1.3622e-01, -9.0019e-02, -4.9178e-02]],
[[-5.8269e-02, -6.3467e-02, 1.4647e-02],
[-7.0218e-02, -4.3700e-02, -8.5096e-02],
[-4.7402e-02, -2.5299e-03, -7.2822e-02]],
[[-6.2187e-02, -6.1465e-02, -6.0336e-02],
[-6.8485e-02, -1.1602e-01, -7.9894e-02],
[-9.9564e-02, -9.5502e-02, -1.1613e-03]]],
[[[ 1.0375e-01, -4.7412e-02, -4.9659e-02],
[ 4.5850e-02, 7.0374e-02, -1.0803e-01],
[ 6.3761e-02, 1.0773e-01, -1.0809e-01]],
[[-1.0929e-01, -8.1735e-02, -2.5334e-02],
[ 1.0884e-01, 3.0243e-03, 4.4169e-02],
[-1.8197e-02, 1.2415e-01, 2.2605e-01]],
[[-2.1119e-01, -5.8678e-02, 1.3647e-01],
[-1.2894e-01, -5.1490e-02, -9.4533e-03],
[-1.7879e-01, -1.0052e-01, 2.6416e-02]],
...,
[[-5.4015e-02, -3.0913e-01, 3.9362e-01],
[ 1.8461e-01, -1.0984e-01, -1.2560e-01],
[ 1.4137e-01, 4.1934e-03, -1.7232e-01]],
[[-2.9610e-02, -2.1450e-01, 2.4045e-01],
[ 9.9810e-02, -1.3725e-01, 6.9746e-02],
[ 1.5609e-01, 4.7546e-02, -3.0073e-01]],
[[ 1.7658e-01, 1.0084e-01, -2.1304e-01],
[ 1.4570e-01, 1.3238e-01, -7.8003e-02],
[ 1.2249e-02, 6.3983e-02, -1.5637e-01]]],
[[[ 6.4775e-02, -8.1769e-02, -6.0009e-02],
[ 2.9438e-02, 7.0084e-02, 6.0134e-02],
[ 6.7826e-02, 8.2022e-03, 1.2608e-01]],
[[ 1.1785e-01, 1.7344e-01, 2.7051e-01],
[ 1.2595e-01, 9.2988e-03, 6.7104e-02],
[-2.5964e-02, -1.4302e-02, 2.0793e-02]],
[[-2.5238e-04, 8.9474e-02, 2.6326e-03],
[ 8.7567e-03, 8.5881e-02, -4.6982e-03],
[-3.0057e-02, 1.0732e-01, 2.1894e-02]],
...,
[[ 1.0352e-02, -2.4759e-01, -3.3401e-01],
[ 4.5734e-02, -1.4318e-02, 4.1219e-04],
[ 8.2775e-02, 3.3163e-01, 3.0546e-01]],
[[-2.5174e-01, -4.1958e-01, -1.4841e-01],
[-1.1307e-01, -1.4754e-01, -3.7038e-01],
[-2.3225e-02, 1.5159e-01, -7.0286e-02]],
[[ 1.2826e-01, 2.2448e-01, -1.0711e-01],
[ 1.2429e-01, 1.3671e-01, 1.7967e-01],
[ 8.8658e-02, 2.3414e-01, 5.8435e-02]]]], device='cuda:0')),
('conv_layers.7.bias',
tensor([-0.0587, -0.0772, 0.0657, -0.1081, -0.0051, 0.1057, 0.0740, -0.0086,
-0.1607, 0.0823, -0.2197, 0.1727, 0.1922, 0.0140, 0.0465, -0.0862,
-0.0434, 0.0255, 0.0405, 0.1700, 0.1987, -0.1403, -0.1055, -0.1500,
0.0287, -0.0845, -0.0307, -0.0993, -0.0490, -0.0208, 0.1973, 0.1793],
device='cuda:0')),
('conv_layers.10.weight',
tensor([[[[ 1.8507e-02, -7.6014e-03, -2.7827e-02],
[-4.9924e-02, -1.3047e-02, -2.8608e-02],
[-5.2371e-02, 5.0726e-02, 5.4321e-02]],
[[ 4.9170e-02, -6.5198e-02, 9.4211e-03],
[ 1.1598e-02, 6.6673e-03, -5.4773e-03],
[ 3.7412e-02, 2.4612e-02, -6.5157e-02]],
[[-1.2610e-02, 4.0940e-02, 1.3868e-02],
[-1.3531e-01, -2.8991e-01, 3.5015e-02],
[-3.0779e-01, -2.6283e-01, -8.1577e-02]],
...,
[[ 5.0785e-02, -3.3046e-03, 1.8746e-02],
[ 6.2283e-02, -9.6864e-02, -3.7083e-02],
[ 2.4145e-02, -2.2859e-02, 6.0468e-03]],
[[ 7.2714e-03, 1.2079e-01, 6.0629e-02],
[-1.3487e-01, -1.7893e-01, -7.2933e-03],
[-2.3277e-01, -2.3317e-01, -5.4986e-02]],
[[-1.0174e-02, -6.7053e-02, -1.3645e-01],
[-1.2852e-01, -2.1792e-01, 2.7154e-02],
[-1.9811e-01, -1.4019e-01, 2.9636e-02]]],
[[[ 3.4411e-02, -5.1726e-02, 7.7560e-03],
[-6.5155e-03, -5.5046e-02, -8.4036e-02],
[-6.7222e-02, 2.3636e-02, -5.5373e-02]],
[[-3.4385e-02, 3.6046e-02, -6.0904e-02],
[ 3.7518e-02, -3.8944e-02, 8.3729e-02],
[-2.4010e-04, 4.2459e-02, 3.0047e-02]],
[[ 2.8260e-02, 1.0598e-01, 9.9962e-02],
[ 4.6251e-02, -4.8244e-03, -8.1454e-02],
[ 7.9582e-03, -3.6021e-02, 4.1207e-02]],
...,
[[-6.6003e-02, -2.6305e-02, -3.8636e-02],
[ 6.2314e-02, 2.6482e-02, -5.0984e-03],
[-7.3548e-04, 7.5723e-02, 1.6574e-02]],
[[ 4.8893e-02, 3.8552e-02, 2.6636e-02],
[-7.9797e-02, -1.6332e-01, -3.0022e-02],
[-1.0690e-01, 9.9440e-03, -3.2803e-03]],
[[ 2.0646e-01, 8.9860e-02, -9.7461e-02],
[ 1.3040e-01, -8.5106e-02, -1.9576e-01],
[-4.8543e-02, -7.6060e-02, -7.6088e-02]]],
[[[-5.6792e-02, -5.4921e-02, 9.1019e-03],
[ 2.0912e-02, 4.7291e-03, 2.1768e-02],
[-1.8561e-03, 3.6906e-02, -8.1393e-03]],
[[ 1.1692e-02, 1.4457e-02, -1.9055e-02],
[-1.8812e-02, 7.2492e-03, 1.1724e-01],
[ 4.7819e-02, 7.4907e-03, 3.7707e-02]],
[[-1.3640e-01, -1.2272e-01, -7.6744e-02],
[-1.2411e-01, 1.5050e-01, 1.4581e-01],
[ 5.5735e-03, 9.7282e-02, 2.2874e-02]],
...,
[[-1.0006e-01, -7.6445e-02, 8.5266e-02],
[-4.9553e-02, -8.9111e-03, 1.4661e-01],
[-7.0014e-02, 3.7815e-02, -7.0599e-02]],
[[-2.0441e-01, -2.2835e-01, -1.8513e-01],
[-8.4445e-02, 1.7430e-01, 1.9410e-01],
[-3.8483e-02, 2.5623e-01, 1.5309e-01]],
[[-1.4733e-01, -1.6539e-01, -2.6320e-02],
[-1.5151e-01, 1.8591e-01, 2.5701e-01],
[-1.0479e-02, 2.1080e-01, 1.8418e-01]]],
...,
[[[ 6.5472e-02, -3.6849e-02, -3.4742e-02],
[ 4.7488e-03, 2.3862e-02, -5.5334e-02],
[-4.5199e-02, -7.4572e-02, -2.7218e-02]],
[[-3.2559e-02, 1.4242e-02, 1.7540e-02],
[ 1.6215e-02, -3.9014e-02, -4.0484e-02],
[ 8.5735e-02, 5.4079e-02, 4.6531e-02]],
[[ 5.9779e-02, 1.5748e-02, 1.2473e-01],
[-6.3147e-02, -4.3008e-02, 1.5271e-02],
[-4.1820e-02, -2.3915e-01, -1.0852e-01]],
...,
[[ 2.8990e-02, -2.2520e-02, 2.7042e-02],
[-2.1528e-02, 9.3096e-02, 1.8889e-02],
[ 3.5683e-02, -7.7031e-02, 4.0823e-02]],
[[-1.7534e-01, -2.6252e-01, -2.5844e-01],
[-1.2713e-01, -1.6444e-02, -2.3910e-01],
[ 6.6962e-02, 1.9493e-01, 4.2891e-02]],
[[ 3.1977e-02, -6.9477e-03, -4.7805e-02],
[-9.6778e-02, -6.2548e-02, -9.3167e-02],
[-6.9098e-03, 3.0038e-02, -2.1956e-01]]],
[[[ 3.2961e-03, 1.8366e-02, 7.6549e-02],
[ 6.4474e-02, 4.9725e-02, 3.0600e-02],
[ 4.8585e-02, 6.4965e-03, 3.2874e-02]],
[[ 4.1199e-02, 3.3011e-02, 3.4724e-03],
[ 3.1810e-02, 2.5880e-02, -7.5140e-02],
[ 1.9162e-02, 1.3219e-02, -1.5559e-02]],
[[ 1.4549e-01, -1.3531e-01, -1.0052e-01],
[-5.6440e-02, -2.1287e-01, -1.0017e-01],
[ 5.2943e-02, -7.3187e-03, -1.0161e-02]],
...,
[[ 1.5548e-03, 3.3653e-02, 5.1592e-02],
[ 3.8207e-02, 4.2841e-03, -2.0838e-02],
[ 2.6479e-02, -3.0663e-02, 1.1049e-01]],
[[ 1.6795e-01, 7.2014e-02, -8.9813e-02],
[ 7.5576e-02, 3.4671e-02, -6.9649e-02],
[-4.9612e-02, 5.9553e-02, 4.2178e-02]],
[[ 1.1299e-02, -6.2531e-02, 7.9189e-02],
[-9.9817e-02, -7.4387e-04, 7.1637e-02],
[ 6.2294e-02, 9.5247e-02, 1.1321e-01]]],
[[[ 6.2024e-02, 7.3312e-02, -1.2506e-04],
[-1.2543e-02, 5.3937e-02, -2.5965e-02],
[-6.0086e-03, 4.0373e-02, 2.5121e-02]],
[[-1.8561e-02, -3.5005e-02, -1.8121e-02],
[-8.7153e-02, -4.2768e-02, -5.7406e-02],
[-8.5265e-03, -3.9740e-02, -3.1747e-02]],
[[-8.9453e-02, 3.3183e-03, -2.2946e-02],
[ 9.3770e-02, -2.9019e-02, -2.5258e-01],
[ 1.7038e-01, 9.3627e-02, -1.6621e-01]],
...,
[[ 6.2041e-02, -3.7758e-02, -8.1577e-02],
[ 1.2627e-01, -6.3343e-02, -1.2313e-01],
[ 1.1076e-01, -6.6265e-05, -1.2016e-01]],
[[ 4.8280e-02, 1.6339e-02, -2.9461e-02],
[ 6.4130e-02, -5.2262e-03, -3.6539e-01],
[ 1.4694e-01, -5.6618e-02, -2.6376e-01]],
[[ 1.2990e-01, 1.9490e-01, -2.4851e-01],
[ 1.5208e-01, 1.5768e-01, -3.0607e-01],
[ 9.2986e-02, 3.0606e-02, -1.0031e-01]]]], device='cuda:0')),
('conv_layers.10.bias',
tensor([ 0.2332, 0.0662, 0.0674, 0.0110, -0.1243, 0.0779, 0.0423, 0.0933,
0.1211, 0.0421, -0.1627, -0.1504, 0.0797, -0.0456, 0.1909, -0.0567,
-0.0617, 0.1214, 0.2832, 0.0708, 0.1643, 0.2084, -0.0499, 0.1916,
0.1317, -0.1674, -0.0415, -0.0561, -0.0946, -0.0556, 0.2203, 0.1069],
device='cuda:0')),
('conv_layers.12.weight',
tensor([[[[-5.3394e-02, -3.3032e-02, -1.7500e-01],
[-1.0444e-01, -1.7157e-01, -1.7022e-01],
[-2.1593e-01, -1.0007e-02, -5.9867e-03]],
[[ 2.0962e-02, -8.0709e-02, 1.7486e-02],
[-3.1764e-02, -7.8272e-02, 3.9994e-02],
[-1.2074e-01, 1.3889e-01, 1.3780e-01]],
[[-5.5835e-02, 8.3085e-02, 9.2081e-02],
[-1.9536e-02, 1.0506e-01, 1.1978e-01],
[-1.1470e-03, 1.8167e-01, 7.0293e-02]],
...,
[[-1.4026e-01, -1.8574e-01, -5.9854e-02],
[-1.6883e-01, -9.6218e-02, -2.1181e-02],
[-1.4155e-01, 5.4455e-02, 3.2978e-02]],
[[ 2.3919e-01, 6.2881e-04, -2.2499e-01],
[ 1.2128e-01, -2.7819e-01, -4.3334e-01],
[-2.7745e-01, -3.7223e-01, -1.4053e-01]],
[[-2.3751e-02, 1.0258e-01, 2.9015e-01],
[ 6.8545e-02, 1.3335e-01, 3.5266e-02],
[ 2.3442e-02, -2.7169e-02, 3.6191e-03]]],
[[[ 3.0944e-02, 1.0694e-01, 1.1634e-01],
[ 4.0692e-02, 1.1285e-01, 1.1077e-01],
[ 1.2101e-01, 2.7936e-01, 2.0824e-01]],
[[-8.1915e-02, -2.0695e-02, 4.1837e-02],
[-2.5952e-02, 7.6070e-02, 9.2328e-02],
[ 1.2627e-01, 2.0855e-01, 1.3146e-02]],
[[ 9.4906e-02, 3.4046e-03, -1.3611e-02],
[-3.5797e-02, -8.9604e-02, 7.7280e-03],
[-3.7875e-01, -4.0164e-01, 3.7460e-02]],
...,
[[-1.1718e-02, -2.0360e-01, -8.0724e-02],
[-1.2100e-01, -2.2867e-01, 1.4217e-02],
[-1.1834e-01, -1.5123e-02, 1.3284e-01]],
[[-1.2377e-01, 1.4912e-02, 6.3105e-02],
[ 5.3561e-02, 4.6618e-02, 2.2550e-02],
[-1.3670e-02, -2.7146e-02, 1.2791e-02]],
[[-3.7642e-01, -2.5314e-01, -1.7805e-01],
[-1.6656e-01, -3.3300e-02, -2.9357e-03],
[ 1.0704e-01, 1.6463e-02, 3.2449e-01]]],
[[[ 5.9285e-02, 1.0602e-01, -9.1456e-02],
[ 2.7246e-01, 1.8697e-01, 3.9736e-02],
[ 5.5600e-02, 8.7527e-02, 1.4089e-01]],
[[ 1.3006e-01, -1.2115e-01, -1.3994e-01],
[-7.3819e-02, -1.2694e-01, -4.5978e-02],
[-1.5250e-01, -1.2276e-01, -6.1456e-02]],
[[-8.0290e-02, 8.4637e-02, 1.9813e-01],
[-2.1144e-02, -2.0652e-02, 1.4774e-02],
[-1.0715e-02, -6.9683e-02, 1.2205e-01]],
...,
[[-2.4882e-02, -4.5086e-02, -1.0819e-01],
[ 5.9224e-02, 6.4737e-03, -9.1784e-02],
[-7.1157e-03, -1.1084e-01, -4.3825e-02]],
[[ 1.9422e-02, -1.3270e-01, -1.5824e-01],
[ 1.5734e-01, 3.1701e-02, -5.4944e-02],
[-1.2732e-01, -3.0134e-02, -8.0466e-02]],
[[-1.3706e-02, -9.0749e-02, -4.6027e-02],
[ 3.1662e-03, -6.1291e-02, -1.1953e-01],
[-3.6822e-02, 9.6424e-03, 1.2572e-02]]],
...,
[[[ 6.7645e-02, -3.8131e-02, -6.6346e-02],
[-6.9014e-02, -8.2444e-02, -1.3124e-01],
[-6.3205e-02, -5.8864e-02, -3.2221e-02]],
[[-8.0984e-03, -4.1734e-02, 7.7043e-04],
[-8.4831e-02, -1.5284e-02, -1.1559e-02],
[-7.1212e-02, -9.7773e-02, 2.9083e-02]],
[[-1.8975e-02, -1.0490e-02, -7.7250e-03],
[ 1.4356e-02, 5.4499e-03, 7.8573e-03],
[ 1.0352e-02, 2.3334e-02, -7.2179e-02]],
...,
[[-2.5510e-02, -4.6055e-02, -6.3790e-02],
[-3.4029e-02, -4.0895e-02, 4.7753e-02],
[-4.9111e-03, -3.8290e-02, -2.0859e-02]],
[[ 1.1437e-02, -1.1080e-01, -8.5315e-02],
[-1.1035e-02, -4.2484e-02, -2.4077e-02],
[-1.2157e-02, -3.8788e-02, 2.7120e-03]],
[[-9.1576e-02, -7.5539e-02, -3.6740e-02],
[-1.1207e-01, -1.2570e-02, -3.7869e-03],
[-1.8816e-02, -8.2888e-02, -8.3438e-02]]],
[[[ 1.6269e-01, 7.1077e-02, 1.6253e-02],
[-1.0899e-04, -2.1086e-01, -1.1782e-01],
[ 1.9133e-01, -7.9114e-02, -9.2110e-02]],
[[-7.8476e-03, 9.6635e-02, 9.3418e-02],
[ 3.3619e-02, 5.7234e-02, -4.0397e-02],
[-6.0629e-02, -7.0535e-02, -6.1992e-02]],
[[-1.1916e-01, -1.1989e-01, -4.0324e-02],
[ 3.6287e-03, 2.1257e-02, 7.0234e-03],
[ 1.1799e-01, 3.6358e-02, 4.7265e-02]],
...,
[[-2.6983e-02, -2.3125e-01, -1.3326e-01],
[-3.0773e-02, -5.1642e-02, 1.8080e-01],
[-6.0414e-02, 2.5654e-02, 2.0745e-02]],
[[ 9.5003e-02, 3.2040e-02, 6.7393e-02],
[ 1.4505e-01, -6.6450e-02, -8.7676e-02],
[ 1.0872e-02, -1.9119e-01, -1.5476e-01]],
[[ 8.0844e-02, -1.5147e-02, -9.4457e-02],
[-6.8325e-02, 4.9522e-02, -9.5242e-02],
[-1.9387e-01, 2.9114e-03, -2.7918e-01]]],
[[[ 2.9851e-02, 3.0191e-03, -3.8254e-02],
[ 1.1228e-01, 2.3700e-04, 1.5098e-01],
[-2.7228e-02, -5.2564e-02, -2.3355e-02]],
[[-2.2491e-01, -2.1092e-02, 8.0491e-02],
[-3.9722e-02, 2.5618e-01, 5.9999e-02],
[ 1.2137e-01, 3.8368e-03, -2.2061e-01]],
[[-1.0671e-01, -1.5087e-01, -1.2742e-01],
[-1.3729e-01, -1.4063e-01, 5.4138e-02],
[-6.6497e-02, 7.6479e-02, 2.0604e-01]],
...,
[[ 3.8365e-02, 3.0036e-02, -2.0231e-01],
[-5.1822e-02, -2.1259e-04, 5.3925e-02],
[-1.4697e-01, 5.5567e-02, 2.0995e-01]],
[[-2.2204e-01, -2.2360e-01, 4.8695e-03],
[-2.1358e-02, 1.4381e-02, 5.1366e-02],
[ 6.4198e-02, 1.3439e-01, 5.0784e-02]],
[[-3.7537e-02, -3.5798e-02, 2.9461e-02],
[-1.9132e-01, -6.3765e-02, -4.6183e-02],
[-1.6277e-01, 5.4196e-03, 3.4031e-02]]]], device='cuda:0')),
('conv_layers.12.bias',
tensor([ 0.0412, 0.0844, 0.0259, 0.0261, -0.2447, 0.2568, -0.1109, 0.0083,
0.1199, 0.0105, 0.0728, -0.0494, 0.0891, -0.1727, -0.0545, 0.0545,
0.1468, 0.1769, 0.0904, -0.1283, -0.0595, -0.2821, -0.0270, -0.0088,
0.0049, -0.0216, 0.0740, 0.2388, 0.0317, -0.0734, 0.1407, 0.1505],
device='cuda:0')),
('fc_layers.0.weight',
tensor([[ 0.1338, -0.0830, -0.0701, ..., -0.0552, 0.1848, 0.0569],
[-0.0430, -0.0963, -0.0202, ..., 0.0794, -0.0425, -0.0583],
[-0.0707, 0.1540, 0.1858, ..., -0.3642, -0.1486, 0.0667],
...,
[-0.0927, -0.0700, 0.0220, ..., -0.1011, -0.2301, 0.0036],
[-0.0638, -0.0737, -0.0601, ..., 0.0233, -0.0038, 0.0024],
[-0.0296, 0.0444, -0.0267, ..., -0.0127, 0.0410, -0.0076]],
device='cuda:0')),
('fc_layers.0.bias',
tensor([-0.0535, -0.0106, 0.0351, -0.0344, 0.0497, -0.0507, 0.1145, -0.0417,
-0.0070, -0.0661, 0.1564, -0.0212, 0.0230, -0.0342, -0.0549, -0.0330,
0.0035, 0.1416, 0.1952, 0.0097, 0.0245, 0.0293, 0.1254, -0.0095,
0.0477, -0.0593, -0.0676, 0.0300, -0.0553, -0.0491, -0.0262, -0.0516,
-0.0040, -0.0277, -0.0676, 0.0195, -0.0613, 0.0701, 0.0187, 0.1514,
0.0191, 0.0643, 0.0714, 0.1557, -0.0798, 0.1005, -0.0483, -0.0581,
-0.0514, 0.0122, -0.0168, 0.0148, -0.0493, -0.0404, 0.0964, -0.0714,
-0.0198, 0.0166, 0.1645, -0.0129, 0.0253, -0.0474, 0.0166, 0.0281,
-0.0311, 0.0161, -0.0207, -0.0677, -0.0048, -0.0581, 0.0711, -0.0194,
-0.0520, 0.0101, -0.0081, -0.0763, 0.0166, -0.0317, -0.0399, 0.0628,
0.0149, -0.0552, 0.1908, -0.0064, -0.0009, -0.0165, -0.0920, -0.1406,
-0.0663, -0.0600, 0.0513, -0.0351, -0.0101, 0.0576, 0.1030, 0.0366,
0.1678, -0.0411, 0.0555, 0.0197, 0.0218, -0.0519, 0.0861, -0.0229,
0.0599, -0.0384, -0.0639, 0.0641, -0.0140, -0.0187, 0.0162, 0.1072,
-0.0344, -0.0042, -0.0702, 0.1998, -0.1143, 0.1837, -0.0929, 0.0602,
0.1634, 0.0338, -0.0573, 0.0963, -0.0155, -0.0158, -0.0252, -0.0631,
-0.1161, -0.0799, 0.0343, -0.0458, -0.0734, 0.0228, -0.0877, 0.0881,
-0.0342, -0.1020, -0.0070, -0.0289, 0.0958, -0.0515, 0.0063, -0.0885,
0.2176, -0.0729, -0.0618, 0.0142, -0.0240, -0.0563, -0.0372, -0.0628,
-0.0324, 0.0095, -0.0012, 0.2216, 0.0889, 0.2480, 0.1328, 0.0248,
0.0202, -0.0854, -0.0106, 0.0744, 0.0570, -0.0167, 0.1208, -0.0259,
-0.0381, 0.0859, 0.1747, -0.0308, -0.0510, -0.0506, 0.1486, -0.0537,
-0.0244, 0.0941, 0.0187, -0.0563, 0.0006, -0.0580, -0.0115, -0.0446,
-0.0414, -0.0240, 0.0141, -0.0814, -0.0605, -0.0602, -0.0926, -0.0449,
0.0613, -0.0432, -0.0768, 0.2331, 0.0710, -0.0804, -0.0602, -0.0583,
0.1398, -0.0637, 0.1778, 0.0025, 0.0545, 0.0182, 0.1479, -0.1206,
0.0186, -0.0271, 0.2017, 0.0094, 0.1996, -0.0127, -0.0987, 0.0358,
0.0356, -0.0316, -0.0031, 0.1119, 0.1535, -0.0429, 0.0222, -0.0404,
-0.0841, -0.0735, -0.0217, -0.0302, -0.0505, -0.0397, 0.1115, -0.0057,
-0.0427, -0.1484, -0.0323, 0.1727, 0.0049, 0.1805, 0.0223, -0.0848,
0.0996, 0.0898, 0.1126, -0.0075, -0.0610, 0.0480, -0.0374, 0.0282,
0.0899, -0.0064, -0.0775, 0.0597, -0.0235, 0.1178, -0.0560, -0.0555],
device='cuda:0')),
('fc_layers.2.weight',
tensor([[-0.0107, -0.1723, 0.1310, ..., -0.2079, -0.0095, 0.0502],
[-0.0451, -0.1218, 0.0726, ..., 0.0390, 0.0243, 0.0230],
[-0.0263, -0.0507, -0.0798, ..., 0.0612, -0.0279, -0.0250],
...,
[-0.0281, -0.1113, 0.0942, ..., -0.0370, 0.0149, -0.0428],
[-0.0704, -0.1204, -0.0132, ..., -0.0801, -0.0174, 0.0241],
[ 0.2799, -0.0068, -0.0637, ..., 0.0396, -0.0510, -0.0517]],
device='cuda:0')),
('fc_layers.2.bias',
tensor([-0.0107, 0.0480, 0.1707, 0.1714, 0.0521, 0.0169, -0.0631, -0.2361,
-0.0746, -0.0835], device='cuda:0'))])},
{'ratio': 0.42,
'bias': 64,
'train_losses': [282.60764892271885,
235.37955838015358,
204.22986490002478,
187.64299932137834,
179.13417499851806,
173.17907572666388,
169.2916281242021,
166.69969320422067,
163.14617443604828,
161.75311170635422,
159.6966869872694,
157.40749053201958,
155.83224352533696,
155.0289292418728,
153.48523064745658],
'test_losses': [258.40531329547656,
216.31698507421157,
193.93308870465148,
180.2789241285885,
178.36074218563004,
166.73174673903222,
166.23749574960448,
163.17488797505698,
159.88001465329936,
155.230825919731,
153.21277178970038,
151.62870871786978,
153.4908104503856,
149.596092747707,
149.46524577982285],
'model_state_dict': OrderedDict([('conv_layers.0.weight',
tensor([[[[ 0.0299, 0.1992, 0.1454],
[-0.2200, 0.1662, 0.3127],
[-0.1900, -0.0635, 0.1163]],
[[-0.1398, -0.0624, -0.1178],
[ 0.1160, 0.3050, -0.1052],
[-0.0174, 0.0572, 0.0789]],
[[-0.1909, -0.2800, 0.0043],
[-0.0488, -0.0173, -0.0010],
[ 0.0561, -0.0799, 0.0830]]],
[[[ 0.0034, -0.2069, -0.1855],
[-0.0501, -0.1605, 0.0033],
[ 0.3103, 0.1878, 0.1488]],
[[-0.0831, 0.0165, -0.1320],
[ 0.2766, -0.1742, -0.2106],
[ 0.2749, 0.1765, 0.0110]],
[[ 0.0624, 0.0956, -0.2465],
[ 0.0640, -0.0829, -0.1020],
[ 0.1669, -0.0863, -0.0570]]],
[[[-0.1852, -0.0269, 0.2749],
[-0.1913, -0.1167, 0.2535],
[-0.0630, -0.0150, 0.0306]],
[[-0.1825, -0.1863, 0.1358],
[-0.3816, -0.0892, 0.1461],
[ 0.0316, 0.0975, 0.2995]],
[[ 0.0219, 0.1036, -0.0231],
[-0.1191, 0.1465, 0.1017],
[-0.1008, 0.1348, -0.0721]]],
[[[-0.0451, 0.2726, -0.0195],
[ 0.0372, 0.1192, 0.1304],
[-0.2697, -0.2386, -0.0790]],
[[ 0.2244, 0.0342, 0.2280],
[ 0.0863, 0.1627, -0.1004],
[-0.2135, -0.2273, -0.0835]],
[[-0.0186, 0.1765, 0.0025],
[ 0.1839, 0.0586, -0.1424],
[-0.1553, 0.0679, -0.1584]]],
[[[ 0.0962, 0.2591, 0.0865],
[ 0.2516, -0.1212, -0.1962],
[ 0.1221, -0.2138, -0.2478]],
[[ 0.1191, 0.0641, -0.0614],
[-0.0589, 0.0066, -0.1748],
[-0.1436, -0.0056, -0.0907]],
[[ 0.1803, -0.0534, 0.0564],
[ 0.1705, -0.0867, 0.0390],
[ 0.0441, -0.1424, 0.0122]]],
[[[ 0.1886, -0.1977, -0.1567],
[ 0.2009, -0.1965, 0.0095],
[ 0.2226, 0.1239, -0.1806]],
[[ 0.0792, -0.0583, -0.1291],
[ 0.2896, -0.0091, -0.1560],
[ 0.1970, 0.0035, -0.1703]],
[[ 0.1598, -0.0701, -0.0496],
[ 0.1183, -0.0831, 0.0310],
[-0.1891, 0.1703, -0.1239]]],
[[[ 0.1322, 0.1220, -0.0849],
[-0.1501, 0.0422, -0.1226],
[-0.0671, 0.0047, 0.0428]],
[[ 0.0794, -0.0787, -0.1289],
[ 0.1430, 0.0238, 0.0243],
[-0.1591, 0.1246, -0.1542]],
[[ 0.1524, -0.1434, -0.2485],
[ 0.1327, 0.1398, -0.1737],
[ 0.2267, 0.0828, -0.1050]]],
[[[-0.2568, -0.1581, -0.1180],
[-0.0342, 0.1907, 0.0716],
[ 0.2200, -0.0296, 0.0113]],
[[-0.2373, -0.1872, -0.2156],
[ 0.1613, 0.1042, -0.1893],
[ 0.1941, 0.2340, 0.1765]],
[[-0.1486, 0.0018, -0.0511],
[ 0.1137, -0.0365, -0.0520],
[ 0.1238, 0.1157, 0.0155]]]], device='cuda:0')),
('conv_layers.0.bias',
tensor([-0.0488, -0.3958, 0.1195, 0.2599, 0.0256, 0.2128, 0.0446, 0.2138],
device='cuda:0')),
('conv_layers.2.weight',
tensor([[[[-1.8288e-01, -5.4573e-02, 5.9000e-02],
[ 4.6796e-02, -1.5163e-01, -6.5135e-02],
[-7.1939e-02, 9.1060e-02, 7.1986e-02]],
[[-1.3311e-02, -1.3247e-01, -2.6482e-01],
[-2.5631e-01, -9.9941e-02, -1.8256e-01],
[-3.2843e-01, -2.4315e-01, -1.7403e-01]],
[[-8.5823e-02, -7.3051e-02, -6.5546e-02],
[-1.2274e-02, -1.7196e-01, 1.0048e-01],
[-3.6885e-02, -1.6163e-01, -3.8555e-02]],
...,
[[-4.2696e-02, -6.9174e-02, -4.7302e-02],
[ 1.7203e-04, -4.4347e-02, 1.1671e-01],
[ 2.3286e-02, 1.3775e-02, 1.1416e-01]],
[[ 9.7581e-03, 3.7314e-02, -2.5656e-02],
[-3.3696e-02, -2.7357e-02, 4.5200e-02],
[ 1.6241e-03, 8.0888e-02, 1.1059e-01]],
[[ 1.2583e-01, 1.2422e-01, 1.3985e-01],
[ 1.1632e-02, 2.0998e-01, 2.3952e-01],
[ 1.6040e-01, 1.4229e-01, 2.6707e-01]]],
[[[-1.3646e-01, -4.9026e-02, -1.3024e-01],
[-1.9562e-02, 5.3229e-02, -1.3278e-01],
[ 1.0279e-01, 1.8070e-01, 1.6235e-01]],
[[ 8.1139e-02, -2.6327e-03, -2.0528e-01],
[ 6.2519e-02, 1.7544e-01, -1.2499e-01],
[ 3.6184e-01, 8.2646e-02, 1.8698e-02]],
[[-3.6936e-04, -9.0678e-02, 6.5097e-02],
[-1.4635e-01, -1.8314e-01, 8.0097e-02],
[ 6.9064e-02, -1.3935e-01, -1.0302e-01]],
...,
[[ 1.0237e-01, 5.4547e-02, 2.6515e-01],
[-7.5253e-02, -6.0758e-02, 1.3726e-01],
[-1.6849e-01, -8.7202e-02, 1.7506e-01]],
[[ 7.0347e-02, -6.3888e-03, -1.5639e-01],
[ 4.8929e-02, -5.3829e-02, -7.6926e-02],
[ 1.5726e-01, 1.3781e-01, 2.0070e-02]],
[[ 7.0392e-02, 2.0650e-02, 1.6458e-01],
[-3.8485e-02, 1.3836e-01, 1.1764e-01],
[-2.0546e-01, 7.9623e-02, 1.5030e-01]]],
[[[-2.1943e-01, -1.4046e-01, 1.4601e-02],
[ 1.2869e-01, -8.4276e-02, -1.9815e-01],
[ 2.5570e-01, 1.4803e-01, -3.2759e-02]],
[[-1.0881e-02, -1.3843e-01, -1.3050e-01],
[ 3.2970e-02, -9.8012e-02, -1.9192e-01],
[-1.8802e-01, -2.3610e-01, -1.5475e-01]],
[[-1.4762e-02, -6.3658e-02, -1.4158e-01],
[ 1.9743e-01, -1.1301e-01, -2.3235e-01],
[ 2.0974e-01, -4.8281e-02, -2.0981e-01]],
...,
[[-7.2307e-03, 1.2352e-01, 3.0754e-02],
[ 1.6289e-01, 7.4349e-02, 2.5877e-01],
[ 1.2801e-01, 1.0157e-01, 1.9046e-01]],
[[-6.4616e-02, -7.5302e-02, -8.4134e-02],
[ 3.3418e-02, -4.5594e-02, -6.5953e-02],
[ 4.3658e-02, 1.6072e-01, 7.2350e-02]],
[[ 4.0035e-02, 9.0943e-02, 1.3612e-01],
[-1.5703e-02, 1.2736e-01, -1.1301e-02],
[ 4.3178e-02, 2.2076e-03, -6.7556e-03]]],
...,
[[[-2.0502e-01, -1.7602e-01, -3.5026e-03],
[-1.7827e-02, -1.1924e-02, 2.7797e-02],
[ 1.8955e-01, 1.0009e-01, 2.1302e-01]],
[[-1.0333e-01, -1.7641e-02, -6.9446e-02],
[-7.6612e-02, 1.6895e-02, -1.6108e-01],
[ 2.4237e-04, 1.6403e-01, -6.9740e-02]],
[[ 9.3339e-02, -4.1237e-02, 3.7230e-02],
[-2.9822e-02, -1.0308e-01, -1.8094e-01],
[ 8.2409e-03, -5.2952e-02, -1.5440e-01]],
...,
[[-2.4374e-02, -8.1836e-02, -1.4704e-01],
[-8.7103e-03, 8.0912e-02, -5.3338e-03],
[-6.9775e-02, 5.2919e-02, 5.8147e-02]],
[[-2.7343e-01, -1.5820e-01, -1.9277e-01],
[-1.8141e-01, -4.2421e-02, -1.4685e-01],
[ 1.0234e-01, 2.3037e-01, -6.0969e-02]],
[[ 6.6786e-02, 3.8477e-02, -5.0832e-02],
[ 9.1698e-02, 7.0441e-02, 4.6180e-03],
[ 7.1397e-02, 1.0533e-01, -1.1993e-01]]],
[[[-4.4799e-03, 1.5062e-01, -1.0177e-01],
[-9.1155e-02, 1.1930e-01, 1.3433e-01],
[-1.2985e-01, -1.5235e-01, -1.4749e-01]],
[[-4.7051e-03, 1.1101e-01, -2.3412e-02],
[ 3.8861e-01, 2.8021e-01, -1.9102e-01],
[ 6.5511e-02, -2.2532e-03, -3.4188e-01]],
[[ 6.1508e-02, -2.7742e-02, -3.2648e-01],
[ 5.4506e-02, -1.7922e-01, -1.9899e-01],
[ 1.0483e-02, 3.5208e-02, -4.4947e-02]],
...,
[[-2.2961e-01, 1.5339e-01, 2.2850e-01],
[-1.5800e-01, -9.8297e-02, 1.9441e-01],
[-1.2339e-01, -1.5412e-01, 9.4203e-02]],
[[ 1.3695e-02, -1.0928e-03, -1.3705e-01],
[ 7.6689e-02, 8.0152e-02, -6.7639e-03],
[-8.0104e-03, -5.4385e-02, -9.2490e-02]],
[[-6.0090e-02, 1.9389e-01, 4.6246e-02],
[-1.6257e-01, -5.8666e-02, -7.9930e-02],
[-1.3180e-01, 3.6301e-03, 2.7264e-02]]],
[[[ 1.8648e-01, 1.0295e-02, -3.0715e-02],
[-9.8297e-03, -6.5428e-02, -5.7085e-02],
[-1.3144e-01, -4.8919e-02, -9.6688e-02]],
[[-7.7845e-02, -1.2643e-01, -2.2352e-01],
[-4.5848e-02, -4.1410e-02, -4.0259e-01],
[-2.2656e-01, -1.6218e-01, -3.7761e-01]],
[[-1.9826e-01, 7.6993e-03, -1.1905e-01],
[-2.2027e-01, -7.4554e-02, -1.3539e-01],
[-9.1834e-02, 7.2170e-02, -1.3818e-02]],
...,
[[-5.3931e-03, 3.5108e-03, -2.1693e-02],
[ 5.7049e-02, 1.2556e-01, 6.3149e-02],
[ 8.6485e-02, -2.5913e-02, 8.1149e-02]],
[[-8.7943e-02, 5.6119e-03, 2.0607e-02],
[-8.7224e-02, 1.0640e-01, -1.3289e-02],
[ 7.9077e-02, 6.4294e-02, 2.4168e-02]],
[[-1.7558e-01, -1.2123e-01, -3.2048e-01],
[-1.1804e-01, -1.0303e-01, -4.8767e-02],
[-8.0461e-02, -1.9039e-01, -9.3277e-02]]]], device='cuda:0')),
('conv_layers.2.bias',
tensor([-0.0270, -0.1619, -0.0238, 0.1218, -0.0663, 0.0559, -0.4301, 0.0449,
0.2539, 0.1466, 0.0507, 0.0778, 0.2633, 0.1794, 0.0849, -0.0180],
device='cuda:0')),
('conv_layers.5.weight',
tensor([[[[ 0.0801, -0.1179, 0.0481],
[-0.1555, -0.1206, 0.0577],
[-0.0293, -0.0590, -0.0155]],
[[-0.0200, -0.0752, 0.0083],
[-0.1455, -0.2824, 0.0937],
[-0.0885, -0.1047, 0.0502]],
[[ 0.1925, 0.0085, -0.1026],
[ 0.1276, -0.1635, 0.0177],
[-0.0902, -0.0658, 0.0568]],
...,
[[ 0.0426, -0.0148, 0.0795],
[ 0.0109, -0.1920, 0.1909],
[-0.1924, -0.0586, 0.0323]],
[[ 0.1640, 0.1385, -0.0400],
[ 0.2079, -0.1984, -0.0801],
[-0.0152, -0.1921, 0.1254]],
[[ 0.1309, 0.2896, 0.0185],
[ 0.1683, 0.1423, -0.1829],
[ 0.0083, -0.2108, -0.2918]]],
[[[ 0.0523, 0.0561, 0.0648],
[ 0.0108, 0.1471, 0.0353],
[ 0.0667, 0.2570, 0.0853]],
[[ 0.0481, 0.1229, 0.0191],
[-0.0616, 0.1230, 0.1188],
[-0.1359, 0.0383, 0.0619]],
[[-0.1058, 0.0915, 0.1192],
[-0.1857, -0.0029, 0.1579],
[-0.2331, -0.0266, 0.0573]],
...,
[[ 0.0212, 0.0168, 0.0055],
[-0.0671, 0.0915, 0.1291],
[-0.0511, -0.1374, -0.0090]],
[[-0.1587, -0.0457, -0.0795],
[ 0.0010, 0.0689, 0.1037],
[-0.1822, -0.0146, -0.0279]],
[[ 0.1173, 0.0191, 0.0867],
[-0.0288, 0.0797, 0.1848],
[ 0.0870, 0.0678, 0.0839]]],
[[[-0.1003, -0.1273, -0.0626],
[-0.0418, 0.0528, -0.0630],
[ 0.0950, 0.3017, 0.0761]],
[[ 0.1539, 0.0952, -0.1005],
[ 0.0831, 0.1176, 0.1497],
[ 0.0373, 0.0621, 0.0866]],
[[ 0.0478, 0.0480, -0.2523],
[ 0.1095, 0.0495, -0.1327],
[ 0.0105, -0.0045, -0.1269]],
...,
[[ 0.2157, 0.0719, 0.0342],
[ 0.0616, -0.0416, 0.1715],
[ 0.0037, 0.0084, -0.1003]],
[[ 0.1354, -0.2223, -0.1305],
[ 0.1499, -0.2046, -0.1555],
[ 0.1275, 0.0353, -0.1741]],
[[ 0.0596, 0.0950, 0.0100],
[ 0.0965, -0.0729, -0.2452],
[-0.0189, -0.0934, 0.0030]]],
...,
[[[-0.1459, -0.1228, 0.0448],
[-0.1481, -0.1530, -0.0167],
[-0.1584, -0.1605, -0.0509]],
[[-0.1941, -0.0837, 0.1153],
[-0.1488, -0.1787, 0.0598],
[-0.1756, 0.0591, 0.0252]],
[[ 0.0866, -0.0928, 0.1085],
[ 0.0924, 0.0679, 0.1028],
[-0.0952, 0.1207, 0.1344]],
...,
[[-0.0223, -0.0594, -0.0392],
[-0.0556, -0.0565, 0.0734],
[-0.1875, -0.1800, -0.0338]],
[[-0.0340, -0.1138, 0.1194],
[-0.0324, -0.1690, 0.1920],
[ 0.0249, -0.1367, 0.1597]],
[[-0.2922, -0.2055, -0.0405],
[-0.2216, -0.1464, 0.0903],
[-0.0290, -0.2018, 0.0529]]],
[[[ 0.1127, 0.0504, -0.0362],
[ 0.1665, -0.0464, -0.0204],
[ 0.0820, -0.2008, -0.3247]],
[[ 0.0535, -0.1251, 0.1007],
[ 0.1592, 0.0372, -0.1528],
[ 0.0394, -0.1362, -0.1381]],
[[ 0.0288, 0.1088, 0.0053],
[ 0.0529, 0.2033, 0.0625],
[-0.1197, 0.1250, -0.0837]],
...,
[[ 0.0318, -0.0049, -0.0376],
[ 0.1219, 0.0730, 0.0496],
[-0.0398, 0.0411, -0.0811]],
[[ 0.1064, -0.1312, 0.0641],
[ 0.0460, -0.0167, 0.0164],
[ 0.0463, 0.0412, 0.0934]],
[[-0.0834, -0.2284, -0.3108],
[-0.0073, -0.1272, -0.2328],
[-0.0468, -0.0262, -0.0603]]],
[[[-0.0968, -0.0222, 0.0679],
[-0.0564, 0.0018, 0.1182],
[-0.1669, -0.1982, -0.2509]],
[[ 0.1118, 0.1362, -0.0464],
[-0.0472, 0.0485, -0.0256],
[ 0.0460, -0.1813, -0.1185]],
[[-0.0153, 0.0331, -0.1626],
[-0.1269, -0.0353, 0.0392],
[-0.1383, -0.2698, -0.2635]],
...,
[[ 0.0132, 0.0457, 0.0116],
[-0.0158, 0.0713, 0.1418],
[-0.0178, 0.0033, -0.0534]],
[[ 0.0750, -0.0901, -0.0558],
[-0.0665, 0.0279, 0.0024],
[ 0.1163, 0.1324, 0.0589]],
[[-0.0590, -0.1702, 0.0017],
[-0.1032, -0.0347, 0.1595],
[ 0.0347, 0.0891, 0.2138]]]], device='cuda:0')),
('conv_layers.5.bias',
tensor([-0.0688, -0.4263, -0.1669, -0.0126, 0.1463, 0.1608, 0.1689, -0.1235,
0.1925, -0.0428, 0.2012, 0.2522, 0.2119, 0.1578, -0.0245, -0.0478,
0.0553, 0.1653, -0.0107, 0.2094, 0.0854, 0.0870, -0.2152, -0.0326,
0.0680, 0.1283, 0.0914, 0.1671, 0.1941, -0.0083, 0.0314, 0.1580],
device='cuda:0')),
('conv_layers.7.weight',
tensor([[[[-0.2097, -0.2181, -0.2433],
[-0.1421, -0.3880, -0.2583],
[-0.1964, -0.1455, -0.0776]],
[[-0.2023, -0.1003, 0.0138],
[ 0.0037, 0.0636, 0.0399],
[-0.0785, -0.1140, -0.0051]],
[[ 0.1986, -0.0510, -0.0329],
[ 0.1831, 0.0033, 0.0300],
[ 0.1269, 0.0128, 0.0158]],
...,
[[-0.0814, -0.0112, -0.0343],
[-0.0422, -0.0486, -0.0586],
[-0.0407, -0.0692, -0.0744]],
[[ 0.0251, -0.0350, -0.0608],
[ 0.0925, 0.0770, 0.0700],
[ 0.0553, -0.0202, -0.0149]],
[[-0.0618, -0.0990, -0.0564],
[ 0.0449, 0.0202, -0.0462],
[-0.0061, -0.0077, -0.0409]]],
[[[-0.1484, -0.0006, 0.1011],
[-0.1804, -0.0861, -0.0118],
[-0.0028, 0.0563, 0.1186]],
[[-0.2395, -0.3122, -0.2750],
[-0.4028, -0.3027, -0.2652],
[-0.1452, -0.1631, -0.2770]],
[[-0.3153, -0.0304, 0.1760],
[-0.1870, -0.0494, -0.1017],
[-0.2770, -0.0852, 0.0504]],
...,
[[-0.1049, 0.0107, -0.0881],
[-0.0640, -0.1144, -0.1904],
[-0.0663, -0.3725, -0.2886]],
[[ 0.1722, 0.0612, -0.0665],
[ 0.1267, -0.1137, -0.0866],
[ 0.0353, -0.0942, -0.1356]],
[[-0.0970, 0.0180, -0.0167],
[-0.1546, -0.0548, 0.1230],
[-0.1265, -0.1736, -0.0058]]],
[[[-0.0752, 0.0225, 0.1765],
[-0.0909, -0.1382, -0.0053],
[-0.0694, -0.2120, -0.0708]],
[[-0.2527, -0.2336, -0.1128],
[-0.0669, -0.1764, -0.1285],
[-0.0174, -0.0774, -0.0861]],
[[ 0.0314, -0.1377, 0.0195],
[-0.1280, -0.0520, -0.0459],
[ 0.0714, -0.0728, -0.0720]],
...,
[[-0.1601, 0.0704, -0.0401],
[ 0.0498, 0.1774, -0.0596],
[ 0.0598, 0.1018, 0.0019]],
[[ 0.0485, -0.0809, -0.2485],
[-0.0449, -0.1697, -0.1677],
[ 0.0783, 0.0628, 0.1005]],
[[ 0.1006, 0.1441, 0.0503],
[ 0.0264, 0.0407, 0.0634],
[-0.1105, -0.0684, -0.0224]]],
...,
[[[-0.1170, -0.0235, 0.0743],
[-0.1174, 0.0616, 0.1269],
[-0.2068, -0.1633, -0.1226]],
[[-0.0098, 0.0742, -0.0101],
[ 0.0391, -0.0756, -0.0930],
[-0.1676, -0.2218, -0.2937]],
[[-0.0217, -0.0781, -0.0956],
[ 0.1174, -0.0580, -0.0758],
[ 0.0662, -0.0544, -0.0931]],
...,
[[ 0.0104, -0.0510, -0.0166],
[ 0.1092, 0.0769, -0.2451],
[ 0.1209, -0.1344, -0.0927]],
[[ 0.1504, 0.0743, -0.0047],
[ 0.2691, -0.1137, -0.1947],
[ 0.0456, 0.1179, 0.0875]],
[[ 0.1952, 0.1237, -0.0322],
[-0.1987, -0.2707, -0.2815],
[ 0.0314, -0.0714, -0.0525]]],
[[[-0.0178, -0.1563, -0.0954],
[-0.0136, -0.4162, -0.0841],
[ 0.0743, -0.2112, -0.1593]],
[[-0.2219, -0.1973, -0.0080],
[-0.2982, -0.3315, -0.0128],
[-0.0600, 0.0027, -0.0250]],
[[-0.0732, 0.0946, -0.1047],
[-0.0333, 0.0048, -0.1230],
[-0.0126, -0.0391, -0.0655]],
...,
[[ 0.0286, 0.0804, -0.0111],
[ 0.0837, 0.1738, 0.0712],
[ 0.0848, 0.1575, 0.0614]],
[[ 0.0286, -0.1396, -0.0750],
[-0.0599, -0.1374, 0.0528],
[-0.0141, -0.1165, 0.0592]],
[[-0.0649, 0.0221, 0.0016],
[-0.0463, -0.0647, -0.1248],
[ 0.1174, 0.0327, -0.0551]]],
[[[-0.0840, -0.0277, -0.1315],
[-0.0575, 0.0289, -0.0644],
[-0.1417, -0.0666, -0.1395]],
[[-0.0425, 0.0353, -0.0732],
[ 0.0120, 0.0039, -0.0750],
[ 0.0454, 0.0196, -0.0587]],
[[-0.0666, -0.1777, 0.0782],
[-0.1539, -0.0156, 0.0046],
[-0.0041, -0.0730, 0.0015]],
...,
[[-0.0217, 0.0005, -0.0672],
[ 0.0636, -0.0123, -0.0317],
[ 0.0359, 0.0255, -0.0466]],
[[-0.0164, 0.0084, -0.1469],
[-0.0119, -0.0647, -0.0086],
[-0.0468, -0.0886, -0.0535]],
[[-0.0743, -0.0898, 0.0155],
[ 0.0204, -0.0739, -0.1029],
[-0.0483, -0.0857, -0.0249]]]], device='cuda:0')),
('conv_layers.7.bias',
tensor([ 0.2024, 0.0516, 0.0978, -0.1629, -0.0722, 0.0201, 0.0590, 0.1048,
0.0721, 0.1597, 0.0712, 0.1242, 0.0008, 0.0907, -0.0543, -0.0545,
-0.0526, 0.1295, 0.1057, -0.0193, 0.0685, -0.0712, 0.1948, 0.1354,
0.1008, 0.0679, 0.0693, -0.0894, 0.1315, -0.0267, 0.1552, -0.1131],
device='cuda:0')),
('conv_layers.10.weight',
tensor([[[[ 0.0238, -0.0477, -0.1724],
[-0.0344, -0.0347, -0.0350],
[ 0.0702, 0.1101, 0.0475]],
[[ 0.0271, 0.0541, -0.1431],
[ 0.0396, 0.1877, 0.1540],
[ 0.0338, -0.1657, 0.1493]],
[[ 0.0831, 0.1192, 0.1968],
[ 0.0962, 0.0429, 0.0093],
[-0.1334, -0.0324, 0.2035]],
...,
[[ 0.0803, 0.0584, 0.1442],
[ 0.3395, 0.2122, 0.0811],
[-0.0172, -0.0808, -0.0760]],
[[ 0.2134, 0.0539, 0.1579],
[ 0.0498, -0.0331, -0.0211],
[ 0.0440, -0.0731, -0.0387]],
[[ 0.0523, -0.0131, 0.0264],
[ 0.0112, -0.0176, -0.0100],
[-0.0757, 0.0384, -0.0537]]],
[[[-0.2265, -0.2013, -0.0088],
[-0.1149, -0.0883, 0.0683],
[-0.0828, -0.1258, 0.0285]],
[[ 0.0069, -0.1075, -0.3850],
[ 0.0223, -0.0399, -0.2387],
[ 0.1824, 0.3474, 0.0679]],
[[-0.0274, 0.0755, 0.1524],
[-0.0762, -0.0523, 0.1378],
[ 0.0319, -0.0278, -0.0800]],
...,
[[ 0.0756, -0.0496, 0.0357],
[-0.0932, -0.0880, -0.0212],
[-0.2504, -0.1821, -0.0117]],
[[-0.1029, -0.2875, -0.3316],
[ 0.1214, 0.0560, -0.0760],
[ 0.1118, 0.1817, 0.0106]],
[[ 0.0323, -0.0008, 0.0017],
[ 0.0182, 0.0274, 0.0442],
[-0.0161, -0.0338, -0.0919]]],
[[[ 0.0126, 0.0198, 0.0879],
[-0.0337, -0.1401, 0.0030],
[-0.1059, -0.1246, 0.0048]],
[[-0.0239, 0.0700, 0.2441],
[-0.1300, -0.3957, -0.0677],
[ 0.0760, -0.1859, -0.1278]],
[[-0.0197, 0.0215, 0.0036],
[-0.1090, -0.0363, 0.0407],
[-0.2067, -0.0508, -0.0942]],
...,
[[ 0.0030, 0.0354, 0.1850],
[-0.0128, -0.1063, 0.0678],
[-0.1085, -0.0017, 0.0502]],
[[-0.0547, -0.0516, -0.0241],
[-0.0947, -0.2955, -0.1562],
[ 0.2521, 0.0508, 0.0280]],
[[ 0.1144, 0.0358, -0.0809],
[ 0.0399, -0.0243, 0.0499],
[-0.0177, -0.0020, -0.1073]]],
...,
[[[ 0.0517, 0.1665, 0.0565],
[-0.0507, -0.1344, -0.1136],
[ 0.0345, 0.0908, 0.2529]],
[[-0.0574, 0.0455, 0.1663],
[ 0.0567, 0.0454, -0.0104],
[ 0.1076, -0.0840, -0.2339]],
[[-0.0534, -0.1046, -0.1157],
[ 0.0241, 0.0120, 0.0048],
[ 0.0500, 0.1388, 0.1061]],
...,
[[ 0.1877, 0.1176, 0.1173],
[ 0.2501, 0.2188, 0.1598],
[ 0.3149, 0.1537, 0.0630]],
[[ 0.0719, 0.2229, 0.1799],
[ 0.0513, 0.0934, -0.1419],
[-0.0952, -0.2729, -0.4649]],
[[ 0.0351, 0.0484, 0.0180],
[ 0.1682, 0.1096, 0.0269],
[ 0.1827, 0.1527, 0.0251]]],
[[[ 0.0495, -0.0812, -0.1832],
[ 0.0992, 0.0726, -0.1131],
[-0.2019, -0.1889, 0.0256]],
[[ 0.1323, 0.1413, -0.0490],
[ 0.0712, 0.0823, 0.0252],
[ 0.0760, 0.0258, -0.0263]],
[[ 0.1377, -0.2025, -0.0481],
[-0.0401, -0.2359, -0.1777],
[ 0.0868, -0.0569, 0.0258]],
...,
[[-0.1473, 0.0201, 0.0275],
[-0.3981, -0.1715, -0.1481],
[-0.0618, -0.1748, 0.0519]],
[[-0.1101, 0.0404, 0.0027],
[-0.0153, 0.1332, 0.1283],
[ 0.0202, 0.1016, 0.0674]],
[[-0.0499, -0.0666, 0.0512],
[-0.0956, -0.0720, -0.0363],
[ 0.0935, 0.0024, -0.0280]]],
[[[ 0.0247, -0.0936, 0.0527],
[-0.1044, -0.1499, -0.0377],
[-0.0631, -0.0762, 0.0670]],
[[ 0.1071, 0.0396, 0.1947],
[ 0.0938, -0.0182, 0.0010],
[ 0.0354, -0.1208, -0.1479]],
[[ 0.0689, -0.0402, -0.1694],
[ 0.0473, 0.0450, 0.0421],
[ 0.0858, 0.1872, 0.0791]],
...,
[[ 0.1101, -0.0265, -0.0714],
[ 0.0362, -0.0666, -0.1749],
[ 0.0570, 0.1680, 0.0549]],
[[ 0.1201, 0.1541, 0.1321],
[ 0.0173, 0.1180, 0.0295],
[-0.2069, -0.1186, 0.0114]],
[[-0.0020, 0.0799, 0.0082],
[ 0.1340, 0.0316, 0.0158],
[ 0.1240, -0.0651, -0.1066]]]], device='cuda:0')),
('conv_layers.10.bias',
tensor([ 0.0598, 0.0596, -0.0564, -0.0255, 0.0306, 0.2465, 0.0776, 0.2097,
-0.0101, 0.1601, -0.0122, -0.0171, 0.0098, 0.1922, -0.0124, 0.1005,
-0.1020, 0.0270, 0.0311, -0.1488, 0.0020, 0.0755, -0.0505, 0.0290,
0.2716, -0.0328, 0.0538, 0.2167, -0.2017, -0.1401, 0.0416, 0.0800],
device='cuda:0')),
('conv_layers.12.weight',
tensor([[[[ 0.0034, -0.0276, -0.0297],
[ 0.0313, -0.0724, -0.2901],
[-0.1112, -0.2392, -0.0648]],
[[-0.4194, -0.2442, 0.0230],
[-0.0618, -0.1114, -0.1477],
[ 0.2796, 0.0797, 0.0441]],
[[-0.0065, -0.0206, -0.0640],
[-0.0547, 0.0649, -0.1749],
[-0.0913, 0.0523, -0.2166]],
...,
[[ 0.0313, -0.2239, -0.1281],
[-0.1714, 0.0427, 0.1539],
[-0.1448, 0.1072, 0.0774]],
[[-0.1775, -0.1759, 0.1496],
[-0.0702, 0.0103, 0.1881],
[ 0.1969, 0.0647, 0.0676]],
[[ 0.0749, -0.0393, -0.0492],
[ 0.0511, 0.1088, 0.1555],
[-0.2510, -0.0455, 0.1559]]],
[[[ 0.1153, 0.0866, 0.1235],
[ 0.0051, 0.0100, -0.0287],
[ 0.0450, 0.0149, -0.0470]],
[[-0.1409, -0.0702, -0.2218],
[-0.2270, -0.1239, 0.0172],
[-0.0140, -0.0307, 0.0053]],
[[-0.1474, -0.0765, -0.0078],
[-0.1148, -0.1199, 0.0283],
[-0.0036, 0.0099, -0.0313]],
...,
[[-0.0128, -0.1776, 0.0160],
[-0.1724, -0.1375, 0.0469],
[-0.3009, -0.2205, -0.0766]],
[[ 0.0376, -0.0603, 0.0647],
[-0.0473, -0.0297, -0.0085],
[-0.0627, -0.1120, -0.1262]],
[[ 0.1766, 0.0740, 0.0088],
[ 0.0115, -0.1766, -0.1837],
[-0.1814, -0.3422, -0.1971]]],
[[[-0.1404, -0.1373, -0.1339],
[-0.0193, -0.1387, 0.0576],
[ 0.0865, 0.0101, 0.0464]],
[[-0.0965, -0.0876, -0.1624],
[-0.1655, 0.1415, 0.0107],
[-0.0338, 0.0505, 0.0142]],
[[ 0.1003, -0.0749, -0.0559],
[-0.0512, 0.0045, 0.0265],
[ 0.0015, 0.0104, -0.0520]],
...,
[[ 0.0073, 0.1127, 0.1278],
[ 0.0574, 0.0408, 0.0893],
[-0.0527, 0.0453, -0.0985]],
[[ 0.0417, -0.1103, -0.0410],
[-0.1548, -0.0331, -0.0413],
[-0.1304, 0.0752, 0.3257]],
[[-0.0504, 0.0943, 0.2064],
[-0.0008, 0.0483, 0.0651],
[ 0.0400, 0.0134, 0.0249]]],
...,
[[[ 0.0962, -0.0319, -0.0347],
[-0.0814, -0.1259, 0.0171],
[-0.2403, -0.1235, -0.0376]],
[[ 0.1601, 0.1862, 0.0021],
[-0.0227, -0.0564, -0.0793],
[ 0.0861, 0.0183, -0.0336]],
[[-0.0903, 0.1196, 0.1325],
[ 0.1078, 0.0647, 0.1213],
[ 0.0545, 0.1261, 0.0433]],
...,
[[ 0.0190, -0.0614, 0.0928],
[-0.0371, 0.1190, -0.0023],
[-0.0608, 0.0713, 0.0066]],
[[ 0.1844, 0.0451, 0.0460],
[-0.0564, 0.0107, 0.0403],
[ 0.0248, 0.0634, 0.1143]],
[[-0.0672, -0.1853, -0.1974],
[-0.2927, -0.3370, -0.2172],
[-0.3009, -0.2306, -0.0734]]],
[[[-0.0294, 0.0732, -0.1780],
[-0.0162, 0.0324, -0.1264],
[-0.1870, -0.2282, -0.1835]],
[[-0.0099, 0.1035, 0.2604],
[ 0.0982, -0.0206, 0.0321],
[ 0.1217, 0.0826, -0.0516]],
[[-0.1557, -0.1371, 0.0684],
[-0.0652, 0.0628, -0.2270],
[-0.0247, 0.0427, -0.1819]],
...,
[[-0.1131, 0.0260, 0.1879],
[-0.2493, -0.4532, -0.2614],
[-0.2523, -0.1086, -0.1740]],
[[-0.1036, 0.0328, -0.0511],
[-0.1146, 0.0249, 0.1152],
[-0.0580, 0.0172, 0.1337]],
[[ 0.0529, 0.0116, -0.1118],
[ 0.1394, 0.0214, -0.0807],
[-0.0311, 0.0015, 0.1078]]],
[[[-0.0480, 0.0073, -0.1723],
[-0.0993, -0.0648, -0.2503],
[ 0.0203, 0.0307, -0.3588]],
[[-0.1052, 0.0246, 0.2386],
[ 0.0593, 0.0490, 0.1928],
[ 0.0140, 0.1251, 0.1033]],
[[ 0.0874, -0.0854, 0.0629],
[-0.0085, -0.0175, -0.1498],
[-0.1255, -0.0903, -0.1796]],
...,
[[-0.1393, -0.0937, -0.0379],
[-0.1053, -0.1073, -0.3177],
[-0.0147, -0.0408, -0.1192]],
[[-0.2175, 0.0356, 0.1147],
[-0.0552, 0.1805, 0.2638],
[-0.0040, 0.1276, 0.2239]],
[[-0.1455, -0.0551, -0.1712],
[-0.1491, -0.1429, -0.2645],
[-0.0663, -0.1496, -0.1998]]]], device='cuda:0')),
('conv_layers.12.bias',
tensor([ 0.0101, 0.2470, 0.0948, -0.0351, 0.0519, -0.0781, 0.1144, -0.0203,
0.1186, 0.1027, 0.1097, 0.0442, -0.0807, -0.0621, 0.0777, 0.0653,
0.0792, -0.0668, -0.0659, 0.0155, 0.0115, 0.1477, 0.0839, 0.1035,
0.0185, 0.1511, -0.0076, 0.1899, -0.1049, -0.1932, 0.0644, 0.0467],
device='cuda:0')),
('fc_layers.0.weight',
tensor([[ 0.0748, 0.2407, -0.0888, ..., -0.0323, -0.0743, 0.1044],
[-0.0079, 0.0119, 0.0212, ..., -0.0375, -0.0609, -0.0446],
[ 0.0960, 0.0183, -0.1151, ..., 0.0183, -0.0196, 0.1807],
...,
[-0.0432, 0.0056, -0.0308, ..., -0.0155, 0.0116, -0.0294],
[ 0.0036, 0.0507, 0.0003, ..., 0.1176, 0.1415, -0.0490],
[ 0.0599, 0.2012, 0.0010, ..., -0.0476, -0.0919, -0.1604]],
device='cuda:0')),
('fc_layers.0.bias',
tensor([-1.1323e-02, 1.8307e-02, -8.0392e-02, 2.4373e-02, -3.7611e-02,
-6.7724e-02, 1.6819e-01, -6.9618e-02, -1.1922e-01, -1.3658e-02,
-6.6031e-02, 3.1551e-02, 2.6732e-02, -1.6264e-02, 2.2340e-03,
9.6592e-02, -8.0250e-03, 2.8152e-02, 1.1288e-01, 6.4993e-02,
-2.3539e-02, -7.2087e-03, 9.7141e-02, -7.1714e-02, 1.0695e-02,
1.5893e-01, -4.0180e-02, -9.6805e-03, 2.4917e-02, -2.7784e-03,
-3.4120e-02, -1.7220e-01, 5.9890e-03, 1.2522e-02, 7.0681e-02,
8.8772e-02, 8.8292e-02, -3.7109e-02, -4.2240e-02, 1.3068e-02,
1.5937e-02, -7.1093e-03, 2.7966e-02, -7.6395e-03, 5.7668e-02,
5.9637e-03, -3.8081e-03, 8.7168e-02, 1.5097e-01, -4.9129e-02,
-5.0052e-02, -7.1569e-02, -3.9406e-02, 4.5039e-02, -5.6554e-02,
-5.8886e-02, 1.4915e-01, 3.0133e-01, -2.1310e-02, -6.2378e-02,
3.6388e-02, 2.9415e-02, 3.8331e-02, -6.4916e-02, 1.0779e-02,
-5.5355e-02, 1.1832e-01, -2.5977e-02, -5.6045e-02, -6.0273e-02,
2.2774e-03, -6.3353e-02, -2.2707e-02, -5.9718e-02, -2.5484e-02,
-7.5523e-02, 3.4751e-02, 2.3922e-02, 9.5553e-02, 2.3528e-01,
1.1914e-01, 1.3778e-01, 6.6263e-03, -3.0842e-02, -1.5482e-02,
-6.4052e-02, -1.5443e-02, 3.4474e-02, -7.7337e-02, 1.9192e-02,
-1.8952e-02, -5.1178e-02, -2.6742e-02, 1.0296e-01, -1.1496e-01,
4.9860e-03, 1.8153e-02, -5.7566e-02, 3.5770e-02, -8.7411e-03,
2.8693e-01, -3.2843e-02, -1.8917e-02, -1.5041e-01, 2.1372e-01,
-1.3957e-02, -5.8583e-02, 1.6726e-02, -2.9911e-01, 8.0506e-02,
-2.8679e-02, -2.4618e-02, -6.5612e-02, 9.5037e-03, 5.7764e-02,
1.1592e-01, 7.8534e-02, 3.1456e-02, -4.5543e-02, 1.9940e-02,
-5.4133e-02, -8.0943e-02, -7.5527e-02, 1.0130e-01, -6.1217e-02,
-9.6057e-02, -3.6912e-02, 1.0719e-01, -1.3677e-02, -5.3515e-02,
9.7055e-03, -3.4659e-02, 1.7669e-02, 1.1708e-01, -4.2841e-02,
1.6350e-02, 1.9512e-01, -4.2334e-02, -3.2830e-02, -4.9285e-02,
-2.6977e-02, -6.4652e-02, -6.5182e-02, -4.4463e-02, 2.4866e-01,
1.5180e-02, 1.0121e-01, -1.3276e-01, 6.7413e-02, 2.3527e-01,
-2.8209e-02, -4.2964e-02, -1.5527e-02, 1.3099e-01, -6.0577e-02,
-1.7041e-02, -6.8213e-02, 4.4344e-03, 1.7386e-02, 1.3683e-02,
-5.1613e-02, 1.2631e-02, -1.0801e-01, -4.0936e-02, -6.8203e-02,
4.1138e-02, 1.5231e-01, -2.0855e-02, -6.0092e-03, -5.5496e-02,
1.3291e-02, 1.9900e-01, 1.0444e-01, 6.1170e-02, -8.5239e-02,
-8.5584e-02, 7.3607e-02, 8.0026e-02, -4.5512e-02, 1.2678e-01,
-3.5823e-02, -8.7706e-03, -8.6323e-02, 7.5242e-02, -3.7904e-02,
-5.8241e-02, -8.0455e-02, -4.2886e-03, 4.2705e-02, 2.3695e-03,
-4.5292e-03, 1.8353e-01, 8.6734e-02, -7.7738e-02, -3.6741e-02,
-2.5291e-02, -4.3170e-02, 8.8191e-02, 3.6915e-02, 1.4687e-03,
1.0358e-03, -6.6342e-02, -3.8641e-02, 3.9679e-02, -4.3622e-02,
-2.9955e-02, 1.6208e-01, -8.4218e-02, -4.7927e-02, 1.5935e-01,
1.2957e-01, -7.1631e-04, -2.0972e-02, -6.0297e-02, 6.8084e-02,
-7.7350e-02, -4.8851e-02, 1.0599e-02, 5.6885e-02, 2.0742e-02,
1.0239e-01, 8.8364e-02, -2.5115e-04, 1.4053e-01, -5.1924e-02,
1.9315e-01, 1.8825e-01, 1.1249e-01, -8.7462e-02, -9.3927e-02,
8.5069e-02, 2.4014e-01, 1.2567e-01, -2.2660e-02, -7.2397e-03,
-1.4148e-02, -8.2538e-02, 5.9697e-02, -5.7918e-02, 3.1139e-02,
1.6076e-02, -7.5907e-02, -6.1468e-02, -3.9899e-02, 6.2619e-02,
-2.0819e-02, 4.7522e-02, -4.4200e-02, -1.8471e-02, -2.6866e-03,
-9.2977e-02, 3.5178e-02, 1.9216e-01, -6.2888e-02, 1.8371e-01,
6.5247e-02], device='cuda:0')),
('fc_layers.2.weight',
tensor([[ 0.0277, -0.0177, -0.1214, ..., -0.0606, 0.0036, -0.0363],
[ 0.1159, 0.0474, -0.1172, ..., -0.0325, 0.0957, 0.0739],
[-0.0335, -0.0452, 0.0428, ..., 0.0077, -0.0138, -0.0721],
...,
[-0.0245, 0.0255, 0.1249, ..., 0.0231, -0.1113, 0.0660],
[-0.0884, -0.0127, 0.0583, ..., 0.0249, 0.1646, -0.2316],
[-0.0251, -0.0215, 0.1243, ..., -0.0230, -0.0833, -0.0893]],
device='cuda:0')),
('fc_layers.2.bias',
tensor([-0.0410, 0.0728, 0.2260, 0.0530, 0.0844, -0.0621, 0.0057, -0.2532,
-0.0130, -0.0903], device='cuda:0'))])},
{'ratio': 0.42,
'bias': 128,
'train_losses': [285.1389265027138,
250.6316996467467,
215.89075438035928,
199.1574290531052,
189.5198747753889,
182.0144418415599,
178.17157429425504,
173.58883229787438,
170.62618317820431,
168.74816889517388,
166.58926164188517,
164.5327277119039,
162.85283121263794,
162.12717953922444,
161.8844937262943],
'test_losses': [273.67988640654323,
227.59790278416054,
202.33528833763273,
187.3447727502561,
182.60712699329153,
175.12529101091272,
174.60608789967554,
166.4976983163871,
165.70475588592828,
164.0336905928219,
160.8958587506238,
159.45016036314124,
161.01810199606652,
155.2010343355291,
154.75456057342828],
'model_state_dict': OrderedDict([('conv_layers.0.weight',
tensor([[[[-9.9676e-02, -1.4772e-01, 2.0039e-01],
[ 1.9741e-01, -4.0532e-02, 1.3053e-01],
[-1.0840e-01, -1.1514e-01, -2.1340e-01]],
[[ 1.4692e-01, -4.1714e-02, 1.9065e-01],
[ 1.6782e-01, 8.3395e-02, 1.3147e-01],
[ 2.6874e-01, 2.0027e-02, -1.3748e-01]],
[[-3.3983e-01, -1.6588e-01, -6.3259e-02],
[-1.3729e-01, 8.4568e-02, 1.5760e-01],
[ 8.9882e-02, -3.6973e-02, -1.6460e-02]]],
[[[ 7.3458e-02, -1.2165e-01, -1.4229e-01],
[ 2.6165e-01, 1.2990e-01, -1.9915e-01],
[ 1.7597e-01, -1.2260e-01, -5.9553e-03]],
[[ 1.8522e-01, -2.0563e-01, -6.6299e-02],
[ 3.3441e-01, 5.8475e-02, -2.9869e-01],
[ 1.2232e-02, -6.4396e-02, -9.2834e-02]],
[[ 1.0887e-01, -1.3229e-01, -2.4419e-02],
[ 1.6301e-01, -3.9062e-02, -2.3413e-02],
[ 4.5611e-02, 1.7248e-02, -1.8960e-02]]],
[[[ 2.1927e-01, -1.6989e-01, -9.8431e-02],
[ 2.5552e-01, -2.2863e-02, 6.8530e-02],
[-4.0456e-02, -1.3555e-01, -2.5376e-01]],
[[ 1.5320e-01, 4.1315e-02, -3.2624e-02],
[ 3.3915e-01, 1.4009e-01, -4.8692e-02],
[ 1.1200e-01, -1.7346e-01, -8.2014e-02]],
[[ 1.7170e-01, -1.0149e-01, -6.7533e-02],
[ 7.1208e-02, 5.9813e-02, -1.5404e-04],
[ 2.2892e-02, -2.2176e-01, -1.8598e-01]]],
[[[ 2.4659e-02, 1.1156e-01, 2.4038e-01],
[-1.6026e-01, -4.9802e-02, 1.6417e-01],
[-3.5067e-01, 3.3833e-02, -7.6922e-02]],
[[ 6.4113e-02, 9.3446e-02, 2.5836e-02],
[-2.8834e-01, 1.2990e-01, 1.1189e-01],
[-1.5172e-01, -1.2603e-01, 1.3562e-01]],
[[-8.6942e-03, 1.6441e-01, 1.2966e-01],
[-1.8361e-01, 5.8454e-02, -1.2062e-02],
[-8.1308e-02, -8.2802e-02, 2.7608e-02]]],
[[[ 1.5424e-01, 2.6101e-01, -1.0786e-01],
[-1.7327e-03, 1.0812e-01, -1.6977e-01],
[-2.1608e-01, -5.3782e-02, -1.3191e-01]],
[[ 3.8894e-01, -2.5161e-02, 7.2800e-02],
[ 4.6832e-02, 2.3264e-01, -1.9301e-01],
[-1.0619e-01, 1.2675e-01, -1.2102e-01]],
[[ 1.2778e-01, -5.4488e-02, 1.1555e-01],
[ 4.1491e-02, -4.4969e-02, 4.9205e-04],
[-8.2446e-02, -1.7046e-01, -1.6012e-01]]],
[[[-5.4265e-02, -2.5959e-03, 3.3499e-03],
[ 1.8192e-01, 1.4963e-01, -1.7185e-02],
[-6.1247e-02, -1.0033e-01, -1.7217e-01]],
[[ 6.8427e-02, 8.8063e-02, 1.3519e-02],
[ 9.6377e-02, 2.1561e-01, -4.6685e-02],
[-8.6737e-02, -1.6418e-01, -2.8529e-02]],
[[ 1.3096e-01, -1.7352e-01, 8.6796e-02],
[ 1.4645e-01, 2.3975e-02, -8.3695e-02],
[ 3.7444e-02, -2.1904e-01, 6.7060e-02]]],
[[[ 1.5432e-01, 3.1789e-01, -5.9533e-02],
[ 6.6438e-02, 8.5367e-02, -7.2623e-03],
[-1.0388e-01, -1.5488e-01, -1.4593e-01]],
[[ 1.8221e-01, 1.9216e-01, 9.6523e-02],
[-1.6175e-01, -2.1671e-01, -1.2447e-01],
[-1.1871e-01, -1.6719e-01, -2.2817e-01]],
[[ 9.9435e-02, 1.0448e-01, -1.0775e-01],
[ 1.7458e-01, -9.3060e-02, 1.6623e-01],
[-6.3354e-02, 2.0149e-02, 5.0630e-02]]],
[[[-1.4629e-01, -2.4108e-01, -2.0748e-01],
[ 8.0266e-02, -6.2561e-03, 1.4479e-01],
[ 2.9333e-01, 4.4299e-02, -6.2038e-02]],
[[-1.0359e-01, -2.3218e-01, -2.8570e-01],
[-3.2398e-02, -5.7277e-03, 8.2269e-02],
[ 1.0029e-01, 2.5098e-01, 2.8965e-01]],
[[-2.5234e-01, 2.5483e-02, 4.3617e-02],
[-1.3960e-02, 1.7879e-01, -1.6826e-01],
[ 1.0097e-01, 1.2657e-01, -1.0111e-02]]]], device='cuda:0')),
('conv_layers.0.bias',
tensor([-0.0158, 0.1642, -0.2942, 0.1914, 0.2599, 0.4624, 0.0778, 0.2034],
device='cuda:0')),
('conv_layers.2.weight',
tensor([[[[-3.0391e-02, 3.3893e-02, -9.3462e-02],
[ 1.8216e-01, -4.5735e-02, -3.1131e-02],
[-1.4125e-01, -9.4639e-02, 1.9606e-02]],
[[ 6.4442e-02, 9.5374e-02, 2.3926e-02],
[ 1.3002e-01, -1.9873e-03, 2.6798e-02],
[-1.0267e-02, -2.6880e-02, 2.1287e-02]],
[[-1.1004e-01, -1.0725e-01, -2.3528e-01],
[-2.0532e-01, -2.2295e-01, -3.1493e-01],
[-1.3911e-01, -1.8467e-01, -4.7402e-01]],
...,
[[-1.0859e-01, 1.5449e-01, -7.0566e-02],
[ 1.6370e-01, 2.2568e-01, 7.0333e-02],
[ 1.1289e-01, -2.1847e-02, -8.9368e-02]],
[[-6.7876e-02, -6.3843e-03, 3.0128e-02],
[ 5.4982e-03, 8.3636e-02, -2.9484e-02],
[ 2.2520e-01, 5.0229e-02, -1.2548e-01]],
[[ 3.4664e-02, 5.8245e-02, -5.3172e-02],
[-2.2736e-01, -3.0612e-01, -1.5798e-01],
[-2.3554e-01, -4.4472e-01, -2.4035e-01]]],
[[[-1.1442e-01, -1.2869e-01, -5.9016e-02],
[-1.4997e-01, -3.9622e-02, -7.2212e-02],
[ 9.9701e-02, 1.4611e-01, 1.6688e-01]],
[[ 3.3732e-02, 5.4479e-02, -1.1919e-01],
[ 1.8205e-01, -7.7968e-02, -5.1633e-02],
[ 9.0416e-02, 1.3288e-01, 1.5656e-01]],
[[ 3.7070e-03, 4.3560e-04, -2.4579e-02],
[ 4.6082e-02, 9.0765e-02, 1.9325e-01],
[-9.8409e-02, 6.4688e-03, 4.2072e-03]],
...,
[[-6.4606e-02, -1.2633e-01, -1.8370e-01],
[ 1.3460e-01, -1.0929e-01, 6.0077e-03],
[ 2.6947e-02, -2.0298e-02, 1.4203e-01]],
[[-6.1968e-02, -1.2651e-01, 3.5795e-02],
[-2.0163e-01, -2.7434e-01, -9.0621e-02],
[-1.7429e-02, -1.6304e-01, 6.5877e-02]],
[[ 2.8516e-01, 1.4134e-01, 1.4637e-02],
[ 1.4901e-01, 1.2349e-02, 2.0955e-01],
[ 7.8983e-03, 6.6959e-02, 2.8762e-02]]],
[[[-2.1854e-01, 6.7247e-03, 4.2068e-02],
[-1.5828e-01, 4.1195e-02, 2.3450e-01],
[-5.3244e-02, 1.4720e-01, 1.0538e-01]],
[[-5.0724e-01, -2.7311e-01, 7.8565e-03],
[-4.1670e-01, -2.4468e-01, 1.5499e-01],
[-3.6792e-01, 1.1046e-01, 7.5097e-02]],
[[ 2.8898e-01, 1.9221e-01, -1.6997e-01],
[ 2.4353e-01, 1.2438e-01, -6.0543e-02],
[-2.3256e-02, -1.1478e-01, -3.6890e-01]],
...,
[[ 3.1321e-02, 9.5002e-02, 4.5950e-03],
[ 1.7982e-01, 2.5086e-01, 6.2689e-02],
[ 2.2928e-01, 1.6762e-01, 4.9035e-02]],
[[-3.6954e-01, -2.4891e-01, -1.2505e-01],
[-1.1463e-01, -2.1915e-01, -6.0056e-02],
[ 4.0434e-03, 1.7146e-01, 8.9875e-02]],
[[ 1.1819e-01, 8.1224e-02, -1.5503e-02],
[ 6.1323e-02, -7.4771e-02, 9.0580e-03],
[-2.4575e-01, -3.1410e-01, -1.5163e-01]]],
...,
[[[ 8.6052e-02, -1.3266e-01, 5.6995e-02],
[-2.0017e-01, 4.5635e-02, 8.9922e-02],
[-7.4845e-02, -8.4073e-02, 5.1787e-02]],
[[-1.8631e-01, -1.7019e-01, 2.6498e-01],
[-2.4711e-01, -1.0760e-01, 2.8424e-01],
[-3.1564e-01, -1.0631e-01, 2.8063e-01]],
[[ 1.0609e-02, 6.6681e-02, -2.9055e-02],
[-3.6018e-02, 1.3168e-01, 7.4530e-02],
[-9.0134e-02, 1.4019e-01, 9.0383e-02]],
...,
[[-3.8347e-03, -1.4661e-02, 9.8074e-03],
[-1.4252e-01, -7.5941e-02, 1.4248e-01],
[-9.5422e-02, -7.8444e-02, 1.4722e-01]],
[[-1.2931e-01, -9.1654e-02, -1.0259e-01],
[-1.4663e-01, -1.4644e-01, 1.2341e-01],
[-1.4924e-01, 7.7846e-02, 5.9201e-02]],
[[ 4.7826e-02, -5.5519e-02, 1.2357e-01],
[-1.4751e-01, -1.1858e-01, 1.4115e-01],
[ 5.3080e-02, -8.6993e-02, 8.6143e-02]]],
[[[ 1.4708e-01, 8.7792e-02, -1.1648e-01],
[-1.4685e-01, 1.5805e-02, 1.4042e-02],
[ 1.2768e-01, 3.0239e-01, 3.5332e-01]],
[[ 2.5458e-01, 1.9225e-02, 1.3413e-02],
[-2.5604e-02, -1.9206e-02, -1.2365e-01],
[-5.2139e-02, 1.7162e-01, 7.0035e-02]],
[[ 2.7558e-01, 4.7564e-02, -9.5407e-03],
[-1.2006e-01, 1.3848e-02, 1.5502e-01],
[-1.7453e-02, 1.6652e-01, 7.8007e-04]],
...,
[[-5.2710e-02, -1.7947e-01, -1.0478e-01],
[-9.4931e-02, -2.1341e-01, -1.5467e-01],
[-1.0275e-01, 6.1193e-02, -2.1902e-03]],
[[ 1.3862e-01, 6.6303e-02, -3.3801e-02],
[ 1.4871e-01, 6.9180e-02, 4.6617e-03],
[-5.8575e-02, -1.7123e-01, 1.4330e-02]],
[[-6.5026e-02, -1.1416e-04, -6.8056e-03],
[ 6.5093e-03, 1.2481e-01, -1.0737e-01],
[ 2.9920e-02, 2.4091e-01, 5.3593e-02]]],
[[[ 9.9573e-02, 1.4822e-01, 2.0779e-01],
[ 6.2829e-02, -2.1007e-02, 9.1475e-02],
[-3.0661e-02, 1.2520e-01, 1.7286e-01]],
[[ 2.1343e-03, 3.4069e-02, -2.6834e-01],
[ 8.3881e-03, -1.7035e-01, -2.5959e-01],
[-4.1112e-02, -3.0707e-01, -1.7338e-01]],
[[ 2.6229e-02, 1.5757e-01, -9.8542e-02],
[ 3.2347e-02, 1.5391e-01, 5.7733e-02],
[-2.5045e-02, -8.0028e-04, -8.1512e-02]],
...,
[[ 1.0705e-01, -1.5289e-01, -2.8897e-01],
[ 8.8914e-02, -1.3387e-01, -1.7536e-01],
[-2.8481e-02, -2.8490e-01, -1.1347e-01]],
[[ 1.2198e-01, -5.2002e-02, -6.7210e-02],
[ 2.7397e-03, -1.8666e-01, -1.9758e-01],
[-5.0162e-02, -1.8304e-01, -1.3078e-01]],
[[-7.2951e-03, 4.4317e-02, 2.4560e-02],
[ 1.1093e-01, 8.7906e-02, -8.0119e-02],
[ 1.4743e-01, 2.5596e-02, 3.7325e-02]]]], device='cuda:0')),
('conv_layers.2.bias',
tensor([-0.0299, -0.2446, 0.1134, -0.4752, -0.2764, -0.0334, -0.1216, -0.1072,
0.1232, 0.0985, 0.0403, 0.1891, 0.0148, 0.0601, -0.3391, -0.2504],
device='cuda:0')),
('conv_layers.5.weight',
tensor([[[[ 0.0114, -0.0834, -0.0926],
[-0.0902, -0.0327, -0.0556],
[-0.0519, -0.0617, -0.0818]],
[[ 0.0332, -0.0576, 0.0839],
[-0.0457, -0.0734, 0.0671],
[-0.0089, 0.0075, 0.0348]],
[[-0.0718, -0.0986, -0.0357],
[ 0.0006, -0.1131, -0.0022],
[ 0.0134, -0.1118, -0.1262]],
...,
[[-0.0640, -0.1051, -0.0139],
[-0.0947, -0.1356, -0.0368],
[-0.0052, -0.0315, -0.0455]],
[[ 0.0234, -0.0618, 0.0503],
[ 0.0611, -0.0609, 0.0360],
[ 0.0823, -0.0219, 0.0621]],
[[-0.1098, 0.0102, -0.0093],
[ 0.0198, -0.0989, -0.1394],
[-0.0847, -0.1133, -0.1160]]],
[[[-0.1283, -0.0198, 0.0440],
[-0.0890, -0.2708, 0.0890],
[-0.1223, -0.1310, 0.0343]],
[[-0.0991, -0.0783, 0.1302],
[ 0.0172, 0.0407, 0.0953],
[-0.0010, -0.1058, -0.0040]],
[[-0.0613, 0.0302, -0.0111],
[-0.1220, -0.0069, 0.0734],
[-0.1089, 0.0538, 0.1875]],
...,
[[ 0.0934, 0.0191, 0.0574],
[-0.0464, 0.0138, 0.0710],
[-0.1018, -0.2010, 0.0233]],
[[ 0.0871, -0.0097, -0.1637],
[ 0.0053, 0.0118, -0.1278],
[ 0.0299, -0.0619, -0.1466]],
[[-0.0557, 0.0476, 0.0532],
[ 0.0259, 0.0633, 0.0635],
[-0.0886, 0.0025, 0.1064]]],
[[[ 0.0383, -0.0341, -0.1908],
[ 0.0590, 0.0605, -0.0016],
[ 0.0805, 0.0036, -0.2303]],
[[-0.0428, -0.0614, 0.0697],
[-0.1138, 0.0306, -0.0289],
[-0.0507, -0.0647, -0.0132]],
[[-0.1728, -0.1101, -0.0594],
[-0.1620, -0.4805, -0.2410],
[-0.1806, -0.2290, -0.0053]],
...,
[[-0.1386, -0.1273, 0.0314],
[-0.2323, -0.1833, 0.1924],
[-0.2441, -0.1102, 0.0280]],
[[-0.1925, -0.0652, 0.0292],
[-0.0387, -0.0375, -0.1008],
[-0.0462, -0.1157, -0.0068]],
[[-0.2096, -0.1616, 0.0638],
[-0.0798, 0.0405, 0.1980],
[-0.0119, -0.0047, -0.0621]]],
...,
[[[ 0.1656, 0.1853, -0.1876],
[-0.0295, -0.1179, -0.1813],
[-0.0791, -0.0421, -0.0689]],
[[-0.0991, -0.0204, 0.0141],
[ 0.0090, 0.0074, 0.0410],
[ 0.1629, -0.0822, -0.1481]],
[[-0.0725, -0.1946, -0.1847],
[-0.2244, -0.0147, 0.1759],
[-0.0279, 0.1687, 0.0620]],
...,
[[ 0.0784, -0.1205, 0.0605],
[-0.2487, 0.0363, 0.0242],
[-0.0499, 0.2372, -0.1022]],
[[-0.0325, -0.2982, -0.0593],
[-0.0143, -0.0025, -0.0228],
[ 0.0873, -0.1255, -0.1655]],
[[-0.1601, -0.0139, 0.1373],
[-0.0845, 0.0682, -0.0686],
[ 0.1483, 0.0815, -0.1304]]],
[[[-0.1890, -0.1949, 0.0412],
[-0.1366, 0.1014, 0.1214],
[-0.1167, 0.1333, 0.0108]],
[[ 0.0015, -0.0205, -0.0790],
[ 0.1030, -0.1386, -0.3110],
[ 0.0236, -0.2052, -0.0207]],
[[-0.0838, -0.0784, -0.0683],
[ 0.1941, 0.2155, -0.0508],
[ 0.2667, 0.0225, -0.0910]],
...,
[[-0.1124, 0.1168, 0.0514],
[-0.1778, -0.0673, -0.2315],
[-0.1480, -0.1354, 0.0076]],
[[-0.4772, -0.3387, -0.0203],
[ 0.0011, -0.1534, 0.0843],
[ 0.1105, -0.0626, 0.1227]],
[[-0.1879, 0.0674, 0.1506],
[-0.1089, -0.0791, 0.1764],
[-0.2107, -0.0270, 0.1641]]],
[[[ 0.0498, 0.1240, 0.0754],
[ 0.0365, -0.0639, -0.0118],
[ 0.0827, -0.0509, 0.2166]],
[[-0.0402, -0.2210, -0.0268],
[ 0.1018, -0.0834, 0.0728],
[ 0.0771, 0.1221, 0.1169]],
[[ 0.0809, 0.1023, 0.1043],
[ 0.1229, -0.1844, 0.1010],
[ 0.0792, -0.1303, 0.1592]],
...,
[[-0.0500, -0.1422, -0.0549],
[-0.0228, -0.2140, 0.0139],
[-0.0861, 0.1205, -0.0832]],
[[ 0.0692, 0.0588, 0.1563],
[-0.0491, -0.1168, 0.2529],
[ 0.0828, 0.0725, 0.0226]],
[[-0.0589, -0.0040, 0.0125],
[-0.0847, -0.1615, 0.0871],
[-0.1157, -0.0308, -0.0265]]]], device='cuda:0')),
('conv_layers.5.bias',
tensor([-0.1608, -0.2399, 0.0353, 0.0706, 0.0078, 0.1777, 0.0063, 0.0746,
0.0802, 0.1535, 0.0200, -0.0163, 0.1980, 0.2178, -0.2933, 0.1844,
-0.1328, 0.1277, -0.0428, 0.1142, 0.0326, 0.0342, 0.1435, 0.0674,
-0.1045, 0.0733, -0.2812, 0.1452, -0.1288, 0.2390, 0.3086, -0.0317],
device='cuda:0')),
('conv_layers.7.weight',
tensor([[[[ 2.9832e-02, -5.5167e-02, -1.6785e-02],
[ 4.4833e-02, 3.2894e-02, -1.7550e-02],
[-2.3279e-02, 3.9515e-02, -1.3366e-02]],
[[-3.5735e-02, -1.3167e-01, -1.0195e-01],
[-8.6573e-02, -2.0611e-02, -4.4514e-02],
[-1.1252e-01, -1.8940e-01, -1.1584e-01]],
[[-2.8501e-01, -1.1046e-01, -1.7053e-01],
[ 1.2111e-01, 5.8293e-02, 1.1301e-02],
[ 1.0690e-01, 8.4467e-03, 1.0715e-01]],
...,
[[ 7.1979e-02, -9.9961e-02, -1.1411e-01],
[ 1.1745e-01, -8.6885e-02, -5.8556e-02],
[ 5.0053e-02, -1.6670e-01, 5.6355e-02]],
[[ 2.6940e-01, -3.5794e-03, -5.0048e-02],
[-7.5212e-03, -1.7955e-03, 1.0932e-01],
[-7.1337e-02, -3.9830e-02, -3.2789e-02]],
[[-3.6966e-02, -1.3275e-02, -4.3234e-02],
[-3.6092e-02, -5.0490e-02, -9.5692e-03],
[-7.4109e-02, -1.3720e-02, 8.6226e-03]]],
[[[ 3.3181e-02, -5.1053e-02, 6.7917e-02],
[-6.0512e-02, -2.6652e-02, -7.1411e-02],
[-3.5356e-02, 2.3469e-02, 1.4654e-02]],
[[-1.4875e-01, -1.9756e-01, -3.0823e-01],
[-4.0410e-02, 1.0218e-01, -2.6586e-02],
[-2.7989e-01, -7.6053e-02, 7.9785e-03]],
[[ 1.3634e-03, -2.1744e-02, -1.8098e-01],
[-3.6647e-01, -1.9330e-01, -2.1243e-01],
[-2.9814e-01, -1.8483e-01, -2.0122e-01]],
...,
[[ 1.2431e-02, -1.6540e-01, -3.1034e-01],
[-1.7686e-01, 6.3846e-02, -1.0064e-01],
[-6.8181e-02, 1.1702e-01, -9.9671e-02]],
[[ 3.9376e-02, -6.7272e-03, -5.1661e-03],
[-6.4462e-02, -4.5274e-01, -2.5319e-01],
[ 7.8366e-02, -1.1298e-01, 3.4371e-03]],
[[-4.8292e-02, -3.6601e-02, 5.0100e-02],
[ 7.6587e-02, 1.9720e-01, 1.0712e-01],
[-4.8900e-02, -1.0236e-01, -8.9209e-02]]],
[[[ 4.0820e-02, -1.9439e-02, -5.3006e-02],
[ 6.1164e-02, -5.1018e-02, 1.9288e-03],
[-1.3465e-02, -5.1301e-02, -5.9419e-02]],
[[ 1.1140e-02, -1.1505e-01, -8.1936e-03],
[ 4.2455e-02, -1.9023e-01, 1.5711e-02],
[ 1.2043e-01, -1.8554e-01, -9.2574e-02]],
[[-3.0289e-02, -7.0263e-02, -3.8138e-03],
[-1.3690e-01, -7.3234e-02, -1.1433e-01],
[-4.6510e-03, 1.1392e-03, -3.8805e-02]],
...,
[[-5.8057e-02, -1.8515e-01, -1.5655e-01],
[-1.1722e-02, -1.3224e-01, -1.6098e-01],
[-1.0463e-01, -1.6294e-01, -1.7142e-01]],
[[ 8.8986e-03, -3.0758e-03, -2.0770e-01],
[-1.0103e-01, 8.7865e-02, -2.1489e-01],
[-1.2164e-02, -1.3128e-01, -7.4216e-02]],
[[-2.6174e-02, 1.0251e-02, -3.3374e-03],
[-1.7775e-01, 4.9339e-02, -9.2476e-02],
[-1.5113e-01, 9.0147e-03, -8.1710e-02]]],
...,
[[[ 5.0808e-02, 6.6441e-03, 4.0068e-03],
[ 4.0065e-02, -3.6214e-02, 4.1752e-02],
[ 5.9055e-02, -2.4886e-02, 6.0118e-02]],
[[ 3.2985e-02, 1.0558e-02, -3.1881e-02],
[-4.3504e-02, -7.4770e-02, -3.7739e-02],
[-7.3664e-02, -3.0489e-02, -5.8007e-02]],
[[ 2.6783e-02, 3.9520e-03, 3.1898e-02],
[-4.2540e-02, -8.0390e-02, -3.0741e-02],
[-1.9155e-02, 4.8020e-03, 4.3321e-04]],
...,
[[-7.2444e-02, 7.2035e-02, -1.3005e-01],
[-1.0798e-01, -1.0178e-01, -1.3622e-01],
[-8.0916e-02, -4.0502e-02, -1.1368e-02]],
[[ 1.4674e-02, -3.0357e-02, 1.1630e-02],
[-6.2954e-02, -5.5702e-02, -6.3223e-02],
[-5.9295e-03, 1.0448e-02, 1.1963e-03]],
[[-7.2106e-02, -6.6035e-02, -6.8560e-02],
[ 2.0033e-02, -1.0701e-01, -9.1558e-02],
[ 3.6850e-02, -9.0923e-02, -4.5001e-02]]],
[[[-1.8032e-02, -3.9929e-02, 2.9539e-02],
[-8.6470e-03, 6.3237e-02, 6.0814e-02],
[ 3.2621e-02, 9.1388e-04, 3.8946e-03]],
[[-8.2637e-02, 8.4538e-02, -3.4281e-02],
[ 1.3271e-01, -8.1490e-02, -3.4012e-02],
[ 1.3098e-01, -1.7300e-02, 9.3265e-02]],
[[ 1.2237e-02, 1.7807e-01, 1.4323e-01],
[ 8.1503e-02, 2.4868e-02, 2.3561e-01],
[-9.0878e-02, -1.1324e-01, 1.0121e-01]],
...,
[[ 1.1140e-02, -1.1703e-01, -1.0318e-01],
[ 1.3877e-02, -5.8860e-02, 3.8448e-02],
[-1.5746e-01, -3.7298e-02, -1.5584e-01]],
[[ 1.1870e-02, -6.6810e-02, -3.3421e-01],
[-3.6463e-02, 4.8294e-02, -2.2714e-01],
[-7.1794e-03, 8.4795e-02, -3.0634e-02]],
[[-2.1458e-01, 2.4302e-02, 8.4266e-03],
[-8.7815e-03, -2.3534e-01, 3.2127e-02],
[ 1.6646e-01, -9.9765e-02, -2.5096e-02]]],
[[[-2.1473e-02, -1.0237e-02, -4.4908e-02],
[-1.5297e-02, 2.2916e-02, 1.1647e-02],
[ 3.2389e-02, 1.2144e-02, 4.4706e-02]],
[[ 6.7634e-03, 1.3515e-02, 8.9462e-02],
[ 1.1068e-01, -1.9611e-01, 2.0172e-02],
[ 6.6985e-02, -1.1060e-01, 2.9888e-03]],
[[-8.7099e-03, -2.0670e-02, 2.7012e-02],
[-3.1089e-02, 5.4082e-03, -5.6218e-02],
[ 5.4140e-02, 7.2267e-02, 7.0625e-02]],
...,
[[ 3.6518e-03, 2.4230e-02, -7.3860e-02],
[ 3.7955e-02, 7.7961e-02, -9.2444e-02],
[-5.1374e-02, 1.3852e-02, -5.9055e-02]],
[[-5.3988e-02, -5.5965e-02, -5.8176e-02],
[ 3.4509e-02, -1.0937e-01, -1.3856e-01],
[-7.0430e-02, 1.5420e-02, -8.6839e-02]],
[[-6.9920e-02, -7.0117e-02, -1.9787e-02],
[-1.3054e-01, -1.3143e-01, -1.0672e-01],
[-7.4613e-02, -1.0946e-01, -8.1836e-02]]]], device='cuda:0')),
('conv_layers.7.bias',
tensor([ 0.1070, 0.0357, -0.0100, 0.1063, 0.1294, 0.0059, -0.1200, 0.1041,
0.0779, -0.1660, 0.1722, -0.1026, -0.0518, 0.0301, 0.3230, 0.1477,
0.0812, -0.0231, -0.1103, -0.0944, 0.0760, -0.1299, -0.1107, 0.1728,
0.0063, -0.0616, 0.2231, -0.1073, -0.0372, -0.1806, 0.0292, -0.0923],
device='cuda:0')),
('conv_layers.10.weight',
tensor([[[[-0.0150, 0.0318, -0.0770],
[-0.0466, 0.0198, -0.0866],
[-0.0248, 0.0038, -0.0499]],
[[-0.0517, 0.0061, -0.0346],
[ 0.0390, -0.0473, -0.0744],
[ 0.0232, -0.0374, -0.0083]],
[[-0.0186, -0.0218, 0.0453],
[ 0.0198, -0.0084, -0.0410],
[-0.0281, 0.0279, -0.0123]],
...,
[[-0.0054, 0.0595, 0.0033],
[-0.0480, -0.0476, 0.0523],
[ 0.0228, -0.0479, -0.0209]],
[[-0.0262, -0.0419, -0.0547],
[ 0.0090, -0.0255, 0.0117],
[ 0.0021, -0.0019, -0.0951]],
[[ 0.0043, -0.0286, -0.0216],
[ 0.0410, 0.0268, -0.0042],
[-0.0126, 0.0613, -0.0244]]],
[[[-0.2219, -0.0191, 0.0216],
[-0.0985, 0.1005, -0.0031],
[ 0.2009, 0.1476, -0.0391]],
[[ 0.0085, 0.1084, 0.0186],
[ 0.1300, 0.0423, -0.0332],
[-0.0259, 0.0555, -0.0592]],
[[-0.0898, -0.1455, 0.0250],
[-0.0860, 0.0041, 0.0317],
[-0.0387, 0.0006, 0.1297]],
...,
[[-0.0433, -0.0414, 0.1530],
[ 0.0149, -0.0524, -0.0604],
[-0.0219, 0.0308, -0.0350]],
[[ 0.1065, 0.0412, 0.0048],
[ 0.0300, -0.1366, -0.1657],
[-0.0542, -0.1644, 0.0985]],
[[ 0.0205, -0.0234, -0.0445],
[-0.0848, 0.0289, -0.0092],
[ 0.0503, -0.0191, -0.1004]]],
[[[ 0.0373, 0.0400, -0.0524],
[-0.0473, -0.0392, -0.0039],
[-0.0099, -0.0317, -0.0272]],
[[ 0.0409, -0.0046, 0.0025],
[ 0.0330, 0.0133, -0.0074],
[-0.0637, 0.0065, -0.0403]],
[[ 0.0434, -0.0114, 0.0225],
[ 0.0437, -0.0020, 0.0232],
[ 0.0057, 0.0252, -0.0080]],
...,
[[-0.0121, -0.0458, -0.0468],
[ 0.0258, -0.0124, -0.0447],
[ 0.0308, 0.0182, -0.0145]],
[[-0.0225, -0.0167, 0.0347],
[-0.0350, -0.0721, 0.0262],
[-0.0325, -0.0509, -0.0290]],
[[-0.0122, -0.0524, -0.0555],
[ 0.0343, 0.0100, -0.0204],
[-0.0327, -0.0063, 0.0231]]],
...,
[[[ 0.1358, -0.0066, -0.0378],
[ 0.1678, 0.1540, 0.0274],
[ 0.1240, -0.0090, -0.0271]],
[[ 0.2011, -0.0452, 0.0220],
[ 0.0736, -0.3399, -0.1104],
[ 0.0823, -0.3901, -0.2971]],
[[-0.0311, 0.1539, 0.0044],
[ 0.1377, 0.1046, -0.1147],
[ 0.0006, 0.2742, 0.1027]],
...,
[[-0.0196, 0.0180, 0.0795],
[-0.1063, 0.0555, 0.0725],
[ 0.0434, -0.0034, 0.0522]],
[[ 0.1082, -0.1328, -0.1814],
[ 0.0138, -0.1674, -0.1097],
[ 0.1687, 0.1663, 0.0386]],
[[-0.0486, 0.0083, -0.0151],
[-0.0100, 0.0362, 0.0769],
[ 0.0764, -0.0593, 0.0958]]],
[[[-0.0119, 0.0396, -0.0262],
[-0.0843, 0.0993, 0.0648],
[-0.0124, -0.0465, -0.0707]],
[[ 0.0522, 0.1477, -0.0658],
[ 0.1409, 0.0779, -0.2416],
[ 0.0903, -0.0128, -0.2156]],
[[ 0.0837, 0.1210, 0.0884],
[ 0.0911, 0.1906, -0.0202],
[ 0.0780, -0.0113, 0.0381]],
...,
[[-0.0585, 0.0202, -0.0170],
[ 0.0562, 0.1067, 0.0454],
[-0.0335, 0.0538, -0.0282]],
[[ 0.0626, 0.2250, 0.0399],
[ 0.0776, 0.0735, -0.1000],
[ 0.0432, -0.2371, -0.2598]],
[[-0.0627, 0.1224, 0.0923],
[ 0.0065, 0.1386, -0.0087],
[ 0.0784, 0.1460, -0.0171]]],
[[[ 0.0688, 0.0485, -0.0277],
[-0.0646, 0.0390, 0.0693],
[-0.0804, -0.1043, -0.0114]],
[[ 0.0494, 0.0353, 0.0955],
[-0.1395, -0.1273, -0.0333],
[-0.2547, -0.0778, 0.1001]],
[[ 0.0952, -0.0146, -0.0205],
[-0.0846, -0.2729, 0.0703],
[-0.1323, -0.0131, -0.1656]],
...,
[[-0.0566, 0.0557, 0.0382],
[ 0.0198, 0.0053, -0.0537],
[-0.0091, -0.0540, 0.0203]],
[[-0.0861, -0.1126, -0.1269],
[ 0.0293, 0.0220, -0.0427],
[ 0.0842, 0.1307, 0.0672]],
[[ 0.0503, 0.0285, -0.0276],
[-0.0639, 0.0560, 0.0045],
[-0.0982, -0.1009, 0.0463]]]], device='cuda:0')),
('conv_layers.10.bias',
tensor([-0.0171, 0.0043, -0.1169, 0.1291, 0.1294, -0.0659, -0.0798, 0.1529,
0.1140, -0.0574, -0.1235, 0.2279, 0.1250, -0.0690, -0.0344, 0.0707,
-0.0065, 0.0384, 0.0995, 0.1357, 0.0639, 0.1197, -0.1087, 0.2943,
-0.1341, 0.2712, 0.0330, 0.1006, 0.0938, 0.0594, -0.1260, 0.0276],
device='cuda:0')),
('conv_layers.12.weight',
tensor([[[[ 2.2391e-03, 6.2425e-02, -2.5695e-02],
[-1.0837e-02, -1.8531e-02, -9.9877e-03],
[ 4.0818e-02, 4.0575e-03, 4.4186e-02]],
[[-2.2191e-01, -4.6574e-02, 1.6074e-01],
[-4.4618e-02, 4.5565e-02, -1.6764e-02],
[ 7.2159e-02, 1.0927e-01, -2.8275e-02]],
[[ 9.7170e-03, 4.2828e-02, -1.5962e-02],
[-2.2388e-02, -3.2781e-02, -1.8785e-02],
[-8.4385e-03, 8.1201e-03, 2.4629e-02]],
...,
[[-1.9153e-01, -1.7728e-01, -9.1222e-02],
[-1.0314e-02, 6.6546e-02, 3.9875e-02],
[ 1.8836e-02, 1.6795e-01, 8.4573e-02]],
[[ 9.0434e-03, -4.1294e-02, 1.6887e-01],
[ 1.1264e-01, 9.0104e-02, -1.5562e-02],
[-1.7117e-01, -3.5876e-01, -1.0746e-01]],
[[-1.2026e-01, 1.6294e-02, -6.1894e-02],
[-1.0783e-02, -6.3934e-02, -1.3985e-01],
[ 1.5959e-01, -3.5219e-02, -9.8983e-02]]],
[[[-3.1515e-02, -3.1064e-02, 1.5425e-02],
[-4.0473e-02, 2.8976e-02, -2.4385e-02],
[ 2.2343e-02, 1.3467e-02, 5.7279e-02]],
[[-8.7677e-02, 7.3356e-02, -1.1158e-02],
[-1.1506e-01, 1.0182e-02, 5.0727e-02],
[-2.3853e-01, -6.8789e-02, 1.5047e-01]],
[[ 1.1061e-02, -1.7608e-02, 5.5312e-02],
[ 5.4071e-02, 3.9234e-03, 1.8741e-02],
[ 1.9502e-02, -1.9423e-02, 5.6400e-02]],
...,
[[ 1.1149e-01, 4.7521e-02, -4.8633e-02],
[ 1.6337e-01, 1.2247e-02, -8.9658e-02],
[ 1.1576e-01, -1.7292e-01, -5.6706e-02]],
[[ 8.4325e-02, -4.1544e-02, -2.0713e-01],
[-9.3539e-02, -2.1562e-01, 6.5417e-02],
[-2.2193e-01, -9.0553e-02, -2.0478e-02]],
[[ 2.5163e-02, -4.5981e-02, -2.7772e-02],
[-1.1526e-01, -6.0485e-02, -8.2031e-02],
[-2.0193e-01, -1.2022e-01, -1.0521e-01]]],
[[[-8.8439e-03, -7.6802e-02, 1.3520e-02],
[-3.5951e-02, -2.2553e-02, -8.7814e-02],
[-7.9258e-02, -9.0494e-02, -4.2598e-02]],
[[ 5.9504e-02, 3.6170e-02, -1.5951e-01],
[ 3.6149e-02, -3.0274e-02, -2.8218e-02],
[-1.8060e-02, -1.6945e-03, -4.6629e-02]],
[[ 7.1449e-03, 5.3913e-02, -1.0841e-02],
[ 2.2433e-04, -3.5628e-03, 8.1957e-03],
[-8.6628e-03, 2.4426e-02, -4.4133e-02]],
...,
[[ 4.3721e-02, -6.4706e-02, 3.1578e-03],
[ 1.8596e-01, 2.9364e-02, -3.1520e-02],
[ 1.3557e-02, 4.1527e-02, -4.7275e-02]],
[[-3.0450e-02, -1.0619e-01, -1.9794e-01],
[-1.6143e-01, -2.0340e-01, -1.5934e-01],
[-3.2309e-01, -9.9979e-02, -3.5583e-02]],
[[ 4.7380e-03, 9.7717e-02, 7.9414e-02],
[-5.0842e-02, 1.2295e-01, 9.5895e-03],
[ 9.5522e-02, -1.2892e-02, -6.5715e-02]]],
...,
[[[ 7.2587e-02, 5.5597e-03, -8.9861e-02],
[ 4.2668e-02, -3.9856e-02, -4.0855e-03],
[-5.9935e-02, -4.8841e-02, 2.0140e-02]],
[[-1.4033e-01, -2.5161e-01, -1.2770e-01],
[ 3.4169e-02, -1.4304e-01, -8.1303e-02],
[-2.2024e-02, 1.4368e-01, -6.1032e-02]],
[[ 4.8712e-02, 5.5460e-02, 1.3916e-02],
[-1.4529e-02, 5.9837e-02, 4.5619e-03],
[-4.8822e-02, -5.1272e-02, -1.4227e-03]],
...,
[[-8.2247e-02, -1.1336e-01, 4.0329e-03],
[ 7.1724e-02, -7.9343e-02, -7.1628e-02],
[-3.4935e-02, -3.7427e-02, -1.5206e-02]],
[[ 4.3702e-02, 1.6080e-02, -2.5629e-02],
[ 1.5704e-01, -5.4358e-03, -7.4419e-02],
[ 1.8708e-01, 4.1621e-02, 1.2877e-03]],
[[ 1.1440e-01, -1.5175e-01, -1.9838e-01],
[-1.0173e-01, 5.3927e-03, -1.8081e-01],
[-1.5612e-01, -8.5859e-02, -1.4777e-01]]],
[[[ 5.0717e-02, -5.8669e-02, 2.2799e-02],
[-4.5309e-03, 2.9422e-02, -9.3265e-03],
[ 4.5615e-02, -4.3601e-03, -2.6716e-02]],
[[ 8.2415e-02, 8.3248e-02, -2.2479e-02],
[ 1.7095e-01, 1.0900e-01, -2.8289e-03],
[ 3.4248e-01, 8.9966e-02, -5.8209e-02]],
[[-4.3093e-02, -4.9602e-03, 4.4791e-02],
[ 5.5125e-02, 1.7006e-02, -1.5353e-02],
[-4.0234e-03, -7.5980e-03, -4.1683e-02]],
...,
[[-4.9183e-02, -2.1862e-01, -1.0797e-01],
[-2.1285e-01, -2.6351e-01, 8.3387e-02],
[-2.1624e-01, -2.5841e-02, 1.6530e-01]],
[[-1.6244e-01, 5.3209e-02, -5.1998e-02],
[-7.5224e-02, -7.1542e-02, -1.6757e-01],
[ 3.0978e-02, -1.4407e-01, -1.9104e-01]],
[[ 8.9470e-02, 4.9254e-02, -7.1323e-02],
[ 4.6241e-02, 7.4811e-02, 1.2469e-02],
[ 9.3837e-02, -8.1935e-02, -2.4116e-01]]],
[[[ 6.6036e-02, -5.7656e-02, -6.8947e-02],
[-4.5259e-02, 1.9547e-02, -4.7460e-02],
[ 6.1403e-02, 2.0600e-02, -3.5996e-02]],
[[ 1.3045e-03, 1.8087e-01, -1.0833e-01],
[ 1.8363e-01, 4.8122e-02, -1.5371e-01],
[ 1.2851e-01, 5.3339e-02, -8.6450e-02]],
[[-6.1851e-03, -3.1135e-03, 3.0432e-02],
[-4.6912e-03, 5.4882e-03, 1.8297e-02],
[-2.3558e-02, -2.7167e-02, -1.4688e-02]],
...,
[[ 3.0539e-03, 5.2623e-02, 1.0268e-01],
[ 1.0975e-01, 1.3927e-01, 1.0265e-01],
[ 4.7130e-02, 5.3210e-02, -5.8704e-02]],
[[ 9.6717e-02, 3.3477e-02, -1.2238e-01],
[ 1.5468e-01, 9.4130e-03, -1.5122e-01],
[-3.9407e-02, -1.4380e-01, -1.2360e-01]],
[[-1.1704e-01, -2.3607e-01, -2.2846e-01],
[-6.1460e-02, -3.1588e-01, -7.4085e-02],
[ 6.1826e-02, -1.7647e-01, 1.2735e-02]]]], device='cuda:0')),
('conv_layers.12.bias',
tensor([-0.0133, 0.0270, 0.0213, -0.0932, 0.0192, -0.0913, -0.1607, -0.0394,
0.1364, 0.1702, 0.0769, 0.1534, -0.1053, -0.0064, 0.0219, -0.0187,
0.0627, 0.1752, 0.0984, -0.2645, 0.2397, 0.0921, -0.0454, -0.0461,
0.0926, 0.0919, -0.0074, 0.0369, 0.1106, -0.0713, -0.0116, -0.0695],
device='cuda:0')),
('fc_layers.0.weight',
tensor([[-0.0638, -0.0555, -0.0419, ..., 0.0011, -0.0056, -0.0342],
[-0.0379, -0.3136, -0.1887, ..., 0.1393, 0.0928, 0.0142],
[-0.0847, -0.0307, 0.0787, ..., -0.1284, -0.1968, -0.1853],
...,
[-0.0410, -0.0180, 0.0125, ..., -0.0449, -0.0466, -0.0315],
[ 0.0528, 0.0993, -0.0301, ..., -0.2226, -0.0861, -0.1673],
[ 0.1412, -0.0628, -0.1255, ..., 0.1619, 0.1525, 0.0526]],
device='cuda:0')),
('fc_layers.0.bias',
tensor([-0.0444, -0.0017, 0.1416, 0.0621, 0.1032, -0.0798, 0.2015, 0.0067,
0.0033, 0.2247, -0.0145, 0.0331, 0.0500, -0.0624, -0.0177, 0.0668,
-0.0324, 0.0582, -0.0671, 0.1084, -0.0334, -0.0641, -0.0249, -0.0609,
0.1637, -0.0423, -0.0627, -0.0207, -0.0416, -0.0295, 0.0925, -0.0923,
0.1005, -0.0281, -0.0488, -0.0086, -0.0343, 0.0613, 0.0130, -0.0351,
0.0251, -0.0829, 0.0098, -0.0647, -0.0344, 0.0221, -0.0343, -0.0122,
-0.0440, -0.0098, -0.0083, -0.0354, 0.1021, 0.0018, 0.0924, -0.0652,
0.0655, -0.0263, -0.0285, 0.0253, -0.1186, -0.0132, 0.1255, -0.0235,
0.0115, 0.0556, 0.0774, 0.1796, -0.0007, 0.0127, -0.0712, -0.0186,
-0.0233, -0.0175, -0.0714, -0.0519, 0.0658, 0.0043, -0.0841, -0.0954,
-0.0891, -0.0158, -0.0353, 0.1231, -0.0104, 0.0230, 0.0902, -0.0488,
-0.0743, -0.0524, -0.0190, -0.1724, 0.2399, 0.0388, -0.0846, 0.0844,
-0.0979, 0.0041, 0.0004, -0.0716, -0.0703, -0.0267, -0.0593, -0.0982,
-0.1031, -0.0482, -0.0558, 0.0103, 0.0163, -0.0787, 0.0992, 0.0809,
-0.0573, -0.0246, -0.0391, 0.0254, -0.1354, 0.1931, 0.0421, 0.1388,
0.0059, 0.0794, -0.0863, 0.0593, -0.0583, -0.0277, -0.0646, -0.0837,
0.0525, 0.0355, 0.0206, -0.0518, -0.0025, 0.0532, -0.0663, -0.0564,
0.0369, 0.2679, 0.0869, 0.0673, -0.1349, 0.0275, 0.1029, -0.0151,
0.1558, -0.0555, 0.0050, -0.0146, 0.1591, -0.1106, -0.0567, 0.0574,
-0.0534, -0.0497, 0.0695, -0.0655, 0.0143, -0.0605, -0.0372, 0.0638,
-0.0354, -0.0395, -0.0442, 0.0179, 0.0428, -0.0739, 0.0137, -0.0848,
-0.1344, -0.0607, -0.0536, -0.0290, -0.0956, -0.0681, -0.0790, -0.0563,
-0.0485, -0.0753, -0.0712, -0.0365, 0.0030, -0.0841, 0.0112, -0.0398,
0.0093, 0.0964, 0.0109, -0.0684, 0.0795, 0.0084, -0.0807, 0.1721,
0.0116, -0.0273, -0.0533, 0.1193, 0.0113, 0.0196, 0.0718, 0.0428,
0.0275, -0.0506, 0.0028, -0.0162, 0.0140, -0.0674, 0.0031, 0.0601,
-0.0057, -0.0011, -0.0209, 0.0810, -0.0564, -0.0093, -0.0203, 0.0922,
-0.0578, -0.0836, -0.0338, -0.0506, 0.2029, -0.1017, -0.0267, -0.0600,
0.0826, -0.1394, 0.0615, 0.2680, 0.0139, 0.0174, -0.0052, 0.0150,
0.0289, -0.0522, -0.0837, -0.0330, -0.0720, -0.0935, -0.0121, -0.0003,
0.0343, 0.0340, 0.0517, -0.0716, 0.0284, 0.0517, -0.0334, 0.0204,
-0.0695, 0.0238, 0.0699, -0.0444, -0.0053, -0.0548, 0.0501, -0.0154],
device='cuda:0')),
('fc_layers.2.weight',
tensor([[-0.0079, -0.0210, -0.0714, ..., 0.0060, -0.1895, 0.0582],
[ 0.0085, 0.2469, 0.0944, ..., 0.0245, -0.0989, -0.1308],
[-0.0127, -0.0740, 0.0721, ..., -0.0024, 0.3109, 0.0092],
...,
[-0.0768, -0.0833, -0.2150, ..., -0.0382, 0.0901, -0.0582],
[-0.0047, -0.0388, 0.0481, ..., -0.0467, 0.0084, -0.0370],
[-0.0339, -0.1272, 0.1898, ..., -0.0272, -0.1542, 0.2864]],
device='cuda:0')),
('fc_layers.2.bias',
tensor([-0.0055, 0.0861, 0.2112, 0.0243, 0.0544, -0.1316, -0.0630, -0.1747,
-0.0587, -0.1220], device='cuda:0'))])},
{'ratio': 0.58,
'bias': 0,
'train_losses': [276.11776620680126,
211.75440826174685,
180.50740631317385,
164.02740398127372,
155.9786988792203,
150.4543334959273,
145.99686278722672,
143.1561550744541,
140.41088390142298,
138.41891614304788,
136.52042902951464,
136.56706119311835,
134.05425663373026,
132.92586159518876,
132.5391981761909],
'test_losses': [236.16187713660446,
189.0961557369606,
166.12777780084048,
154.80130874409394,
147.0391907785453,
142.6933127524806,
139.60511359513976,
139.2054525496913,
133.683624309652,
131.8453458477469,
129.7945331218196,
132.46755506478104,
128.5068732429953,
128.74146354899688,
125.40426300553715],
'model_state_dict': OrderedDict([('conv_layers.0.weight',
tensor([[[[ 0.1260, -0.1025, -0.1530],
[-0.0056, 0.0640, -0.1436],
[ 0.2058, -0.0386, 0.0898]],
[[-0.0087, -0.0662, -0.2229],
[ 0.1327, 0.0935, -0.2135],
[ 0.1794, 0.2053, 0.0514]],
[[-0.1470, -0.0944, 0.1029],
[ 0.0071, -0.0977, 0.0239],
[ 0.0103, 0.1493, -0.0899]]],
[[[-0.1354, -0.0276, -0.1700],
[-0.0065, -0.0561, -0.0926],
[-0.0323, 0.3110, 0.2203]],
[[-0.1831, -0.2448, -0.0620],
[ 0.0136, -0.1564, 0.1912],
[ 0.2017, 0.1155, 0.2096]],
[[-0.1146, -0.1950, -0.0301],
[-0.0229, 0.0555, -0.1166],
[ 0.0308, 0.2480, 0.1618]]],
[[[ 0.1130, 0.1380, -0.1429],
[ 0.2229, 0.0265, -0.2293],
[-0.0153, 0.0036, -0.1214]],
[[ 0.0740, 0.2246, 0.0384],
[-0.0497, 0.1902, -0.0270],
[-0.0351, -0.1716, -0.2097]],
[[ 0.2097, 0.1519, 0.0451],
[-0.0008, -0.0949, -0.2185],
[ 0.0529, -0.0383, -0.1044]]],
[[[-0.1645, -0.0673, -0.0351],
[-0.1066, -0.1555, 0.2146],
[ 0.0699, 0.0528, 0.1451]],
[[-0.2340, 0.1249, -0.0181],
[-0.1370, 0.1475, -0.1306],
[-0.1729, -0.1691, -0.0107]],
[[-0.0374, 0.0582, -0.1461],
[ 0.1870, -0.0704, 0.1350],
[-0.0026, 0.1877, 0.1658]]],
[[[-0.0091, 0.0504, 0.1846],
[-0.2327, 0.0479, -0.0886],
[-0.1088, 0.1578, 0.1855]],
[[-0.0228, 0.2341, -0.0777],
[-0.2940, -0.1645, -0.1402],
[-0.1109, 0.2235, 0.1753]],
[[ 0.0582, 0.1119, 0.0689],
[-0.2269, -0.1741, -0.0729],
[-0.0904, 0.1995, 0.0288]]],
[[[ 0.0880, 0.0147, 0.1641],
[-0.1518, -0.0760, 0.1150],
[-0.2194, -0.0367, 0.0955]],
[[-0.0927, -0.0570, 0.2388],
[-0.3154, -0.0673, 0.2480],
[-0.2016, 0.0455, 0.1929]],
[[ 0.0375, 0.1526, 0.0882],
[-0.1167, -0.0246, -0.1022],
[-0.1406, 0.0543, 0.0918]]],
[[[ 0.0613, 0.0130, 0.0674],
[-0.2087, -0.1293, 0.0861],
[ 0.1094, -0.1090, 0.0826]],
[[ 0.0072, 0.1129, -0.0504],
[-0.1949, -0.3302, 0.0220],
[ 0.0206, -0.1084, 0.1269]],
[[ 0.0734, 0.0531, -0.1069],
[ 0.0213, 0.0749, 0.0252],
[-0.1869, 0.0375, 0.0311]]],
[[[ 0.0879, -0.1128, -0.3249],
[ 0.1679, 0.0301, -0.0236],
[-0.0838, 0.0728, 0.1681]],
[[-0.0221, -0.1907, -0.2959],
[-0.0022, -0.1290, -0.1321],
[ 0.2100, 0.1993, 0.2027]],
[[ 0.0826, -0.0931, -0.0479],
[ 0.1649, -0.1614, 0.0754],
[ 0.2216, 0.0730, -0.1458]]]], device='cuda:0')),
('conv_layers.0.bias',
tensor([ 0.1755, 0.2145, 0.1820, -0.3742, -0.0699, 0.2394, -0.0963, 0.1486],
device='cuda:0')),
('conv_layers.2.weight',
tensor([[[[-1.8038e-02, -1.7918e-02, -9.8823e-02],
[-1.4231e-02, 7.2700e-04, -1.0628e-02],
[ 8.9570e-02, 1.2226e-01, 3.8656e-02]],
[[-4.0519e-02, -7.1961e-02, 1.0272e-01],
[-1.7736e-01, 2.8394e-02, -2.8846e-02],
[ 1.0465e-01, -3.2002e-02, 7.5192e-02]],
[[ 6.6148e-02, -2.2106e-02, -1.7838e-01],
[ 8.3525e-04, -8.6881e-03, -2.0822e-01],
[ 7.2234e-02, -8.6343e-02, 1.6593e-02]],
...,
[[-2.3235e-01, -3.0756e-02, 1.6536e-01],
[-3.9086e-01, -1.0575e-01, 7.6025e-03],
[-3.6476e-01, -1.9043e-01, 1.5472e-01]],
[[ 2.6654e-02, 2.5580e-01, 2.0309e-01],
[-7.9191e-02, -7.4133e-03, 2.6734e-01],
[-1.5811e-01, -1.0182e-01, -2.5573e-02]],
[[ 3.7605e-02, 7.7080e-02, 1.6351e-02],
[ 1.6469e-01, 1.1474e-01, 9.1112e-03],
[-2.0444e-04, 2.7012e-02, 1.5215e-01]]],
[[[-4.0073e-02, -2.2397e-02, 6.8333e-02],
[ 5.4353e-02, -1.0010e-01, -3.7991e-03],
[ 5.9141e-02, -8.4000e-02, -5.7785e-02]],
[[-9.2565e-02, -3.5516e-02, -1.1229e-01],
[-6.3441e-02, -6.5279e-02, 7.9814e-02],
[-4.7090e-02, -6.5528e-03, -1.7422e-02]],
[[ 2.4611e-02, -2.6484e-02, -1.3095e-01],
[-1.1578e-01, -7.5827e-02, -1.3983e-01],
[-9.0826e-02, -8.1527e-02, -8.8357e-02]],
...,
[[ 3.4982e-02, -9.6360e-02, 8.3741e-02],
[-1.5315e-01, 3.5497e-02, 2.7518e-02],
[-8.1631e-02, -3.6951e-02, -6.4972e-02]],
[[-1.5731e-02, 1.1257e-02, -5.5014e-02],
[-9.8445e-02, -1.2981e-01, -8.4738e-02],
[-4.0686e-02, -2.8060e-03, -1.1789e-01]],
[[ 7.8407e-02, 2.3245e-02, -1.0778e-01],
[-8.0159e-02, -2.0408e-02, -6.8034e-02],
[-6.4778e-02, 5.8080e-03, 2.3732e-02]]],
[[[-1.7989e-01, -8.7699e-02, 5.1265e-02],
[-1.3391e-01, 4.8343e-02, -4.2599e-02],
[-1.8534e-01, -1.2344e-01, -5.6648e-02]],
[[-2.1469e-01, -2.3669e-01, -7.2916e-02],
[-9.9292e-02, 2.7853e-02, 5.2479e-02],
[-1.8046e-01, -1.6056e-01, -1.8522e-01]],
[[-1.6004e-02, 3.5880e-02, 3.8932e-02],
[-1.0908e-01, 1.1995e-01, -7.3770e-02],
[-1.0592e-01, -5.2098e-02, 1.0208e-01]],
...,
[[ 1.4828e-01, -8.1646e-02, -4.3921e-02],
[ 1.6197e-01, 7.5747e-02, -6.7721e-02],
[ 1.0806e-01, 1.7852e-01, 1.2614e-01]],
[[-1.0970e-02, 4.0806e-02, 4.9619e-02],
[ 1.0151e-01, -6.2879e-02, -9.6922e-02],
[-4.7020e-03, 7.3009e-02, 1.0851e-01]],
[[-2.4596e-01, -8.0419e-02, -2.9050e-02],
[-1.8541e-01, 1.4348e-01, 3.9575e-02],
[-1.7921e-01, 2.1255e-03, -9.3960e-02]]],
...,
[[[-1.2487e-01, -4.9520e-02, -9.9742e-03],
[ 3.1019e-02, 5.5596e-02, 7.4654e-02],
[-1.9822e-01, 3.0270e-02, 3.1211e-02]],
[[-9.5885e-02, 5.6686e-02, 4.5150e-02],
[ 1.6382e-01, 1.2850e-01, -8.2616e-02],
[ 2.4151e-02, -4.6237e-02, -2.1400e-01]],
[[-1.6436e-01, -1.7851e-02, -2.4552e-01],
[-1.9357e-01, 1.2781e-02, -6.8092e-02],
[-4.8701e-02, 1.3439e-01, 6.7565e-02]],
...,
[[ 1.7282e-01, 1.0777e-01, -2.8937e-02],
[ 5.9017e-02, -4.9941e-02, -1.7544e-01],
[ 1.9211e-02, -1.5221e-02, -1.5681e-01]],
[[ 1.3559e-01, 8.9926e-03, -1.8569e-01],
[ 6.6233e-02, 7.9300e-02, -1.0336e-01],
[ 5.1497e-02, -2.5038e-02, -1.9833e-01]],
[[-1.5144e-01, -6.0635e-02, 5.5487e-02],
[ 1.0720e-02, 1.1928e-02, 5.1128e-02],
[-1.6068e-01, 3.8040e-02, -4.8949e-02]]],
[[[-9.5088e-02, 2.0892e-01, -1.6375e-01],
[-2.1641e-01, 1.2718e-01, -1.3395e-01],
[-1.2082e-01, 8.3301e-02, -1.0377e-01]],
[[-3.9202e-01, -2.1899e-01, -5.2780e-02],
[-8.6829e-02, 1.8543e-02, 1.3922e-01],
[ 1.4983e-01, 1.2962e-01, 2.1578e-01]],
[[ 1.5658e-01, 1.0396e-01, -1.9868e-01],
[ 6.2420e-03, -1.2364e-01, -1.4659e-01],
[-9.4090e-02, 5.4292e-02, -1.8380e-01]],
...,
[[-9.1896e-02, -1.1359e-02, 5.3635e-02],
[ 7.8755e-02, -2.6303e-02, 1.0545e-01],
[-2.2016e-02, 1.8545e-02, 7.8659e-03]],
[[ 4.5266e-02, -8.6524e-02, 6.2199e-02],
[ 4.3616e-02, 1.2617e-01, -8.5404e-02],
[ 1.9205e-01, 5.8274e-02, 7.5546e-02]],
[[-9.1500e-02, 1.0778e-01, 1.7015e-02],
[ 4.8582e-02, -3.1686e-02, 1.4932e-02],
[ 1.0336e-01, 1.0302e-01, 6.9417e-02]]],
[[[-1.0458e-01, -1.1302e-01, -1.2189e-01],
[ 9.6313e-02, -6.1936e-03, -7.2215e-02],
[ 1.2049e-01, 1.3340e-01, 9.4450e-02]],
[[-7.0353e-02, -2.0318e-01, -1.6613e-01],
[-1.6602e-02, -1.7431e-02, -3.7973e-03],
[ 2.1648e-01, 2.5569e-01, 1.8557e-01]],
[[ 3.2622e-02, 1.0608e-01, -2.0390e-02],
[-6.1135e-02, 2.1202e-03, 4.0139e-02],
[-1.5186e-01, -1.2633e-01, -1.2234e-01]],
...,
[[-3.1648e-02, 9.9777e-02, 4.9688e-02],
[-1.3709e-01, -1.1543e-01, 1.3437e-01],
[-1.0895e-01, -2.0899e-01, -1.4709e-01]],
[[ 3.3925e-02, 3.1508e-02, 1.3986e-01],
[ 7.4618e-02, -3.4085e-02, 9.0385e-02],
[-7.0514e-02, -1.2272e-01, -1.9183e-02]],
[[-8.6653e-02, -1.4230e-01, -1.5746e-01],
[ 9.3350e-02, 1.0324e-03, -4.8912e-02],
[ 1.6370e-01, 2.4463e-01, 7.8871e-02]]]], device='cuda:0')),
('conv_layers.2.bias',
tensor([ 0.0830, -0.1149, 0.2811, 0.1237, 0.2536, -0.0079, -0.2495, 0.0880,
-0.2269, -0.1090, 0.0386, 0.1747, 0.0149, 0.3297, -0.0585, -0.0199],
device='cuda:0')),
('conv_layers.5.weight',
tensor([[[[-1.5159e-02, -1.5357e-01, 3.1964e-02],
[-4.8871e-03, -1.9883e-01, 1.1992e-02],
[ 2.3123e-03, 2.0315e-02, -6.5065e-02]],
[[ 3.8293e-02, -7.8142e-03, -2.9676e-02],
[-5.9269e-02, 4.3709e-02, -1.0198e-01],
[-9.3374e-04, -1.5282e-02, -9.9865e-02]],
[[-1.0703e-01, 9.8639e-02, 1.6766e-01],
[-2.8714e-02, 2.7054e-01, 2.1548e-01],
[-1.7277e-01, -2.2856e-02, 4.0882e-02]],
...,
[[ 5.2827e-02, 7.4228e-02, -8.0520e-02],
[-1.1564e-01, -7.6488e-02, 4.4313e-02],
[-5.6776e-02, -3.7667e-02, 1.0734e-01]],
[[-2.1752e-02, -1.9339e-01, -2.0251e-01],
[ 1.1413e-02, 8.5601e-03, -1.6446e-01],
[ 7.8189e-02, 6.6607e-02, 2.6890e-02]],
[[ 1.2882e-01, 1.6850e-01, 2.7880e-01],
[ 6.4768e-02, -5.4483e-02, 5.5471e-02],
[-2.2094e-01, -1.8181e-01, -6.4828e-02]]],
[[[-4.5231e-02, -2.6036e-01, -1.9615e-01],
[ 3.3988e-02, -1.3570e-01, 1.2197e-01],
[-1.9844e-01, -1.3311e-01, 1.4772e-01]],
[[ 1.5861e-02, -4.8077e-02, 8.1452e-03],
[ 2.8926e-03, -4.3383e-02, 1.5889e-02],
[ 1.0978e-02, -9.8349e-02, 3.1873e-02]],
[[-1.0741e-01, -2.0908e-01, -1.9469e-01],
[ 2.5413e-02, -8.1529e-02, -1.8756e-01],
[ 1.2777e-01, -4.9756e-02, -4.3020e-02]],
...,
[[-1.6026e-01, 1.0256e-01, 2.6394e-01],
[-2.6230e-02, 1.7209e-01, 1.9629e-01],
[ 1.5343e-01, 2.3959e-01, -4.7045e-02]],
[[ 1.8409e-01, 6.0347e-02, -1.0620e-02],
[ 1.5039e-01, -9.9465e-02, -1.4396e-01],
[ 8.6903e-02, -6.0802e-02, -1.2732e-01]],
[[-1.8249e-01, -4.2915e-01, -1.4993e-01],
[-1.5918e-01, -3.0957e-01, 5.9324e-02],
[ 2.8185e-03, -3.7829e-02, 3.4537e-01]]],
[[[-9.0506e-03, 8.6173e-02, 3.0888e-02],
[-2.1241e-03, 5.3898e-02, 9.7772e-02],
[-9.3738e-02, -1.5954e-01, -4.6954e-02]],
[[ 1.3554e-01, -2.0438e-03, 2.9076e-03],
[ 2.0868e-02, -3.9518e-02, 3.0496e-02],
[-8.0706e-02, -6.9091e-02, -2.0950e-02]],
[[ 1.6018e-02, -2.6484e-02, -1.4100e-01],
[ 2.0748e-01, 2.3973e-01, 1.0857e-02],
[ 1.6652e-01, 2.1120e-01, -2.6357e-02]],
...,
[[-1.4452e-02, -1.4324e-01, -1.2445e-01],
[-9.8976e-03, -1.8566e-02, -9.6667e-02],
[ 6.3519e-02, 8.9367e-02, -1.0695e-01]],
[[-4.9657e-02, -3.5937e-02, -4.2609e-02],
[-1.0196e-01, -8.9364e-02, -6.4049e-02],
[ 1.6906e-01, 5.3830e-02, -8.0322e-02]],
[[ 4.5951e-01, 2.8995e-01, 1.3476e-01],
[ 2.6436e-01, 9.8588e-02, 9.6387e-02],
[ 6.7266e-02, -1.3676e-02, -1.0335e-01]]],
...,
[[[-1.7846e-02, -1.1152e-01, -1.4668e-01],
[-1.0734e-02, 8.1933e-02, 7.3245e-03],
[ 8.5840e-02, 9.8763e-02, 1.3509e-01]],
[[ 1.1400e-01, -9.1624e-03, -7.5266e-02],
[-1.9092e-02, 3.2902e-03, 4.7884e-02],
[-7.0619e-02, 2.7370e-03, -6.3522e-02]],
[[ 3.5512e-02, 1.7843e-01, 2.2264e-01],
[-3.4938e-01, -4.4678e-02, 2.5163e-01],
[-3.6166e-01, -2.1223e-01, 9.8153e-02]],
...,
[[ 6.1684e-02, -2.7873e-02, 2.1109e-01],
[ 7.3174e-02, -1.7370e-01, 1.2892e-01],
[ 6.1130e-02, -1.5403e-01, 5.1069e-03]],
[[-9.2707e-02, 1.7460e-02, 6.4910e-02],
[ 3.0788e-02, 7.2301e-02, 3.0574e-02],
[-1.2626e-01, 3.2899e-02, -6.7662e-02]],
[[-2.7120e-01, -3.4718e-01, -1.7930e-01],
[-1.2219e-01, 3.4309e-02, 1.2053e-01],
[-5.4054e-02, -2.7338e-02, 1.2853e-01]]],
[[[-2.1188e-01, -1.8332e-01, 1.6280e-02],
[-3.2024e-04, 4.6876e-02, 1.7423e-01],
[-1.3065e-01, 1.7127e-02, 1.1247e-01]],
[[-6.3785e-02, 4.9716e-03, 2.8079e-02],
[ 1.4171e-02, 2.4001e-02, -2.2533e-02],
[-1.0671e-02, 9.4136e-02, -4.2256e-02]],
[[ 4.5439e-02, 6.2359e-02, -4.4556e-02],
[ 9.0206e-02, 6.2817e-02, -4.8811e-02],
[ 1.4236e-01, 9.4357e-03, -5.6343e-02]],
...,
[[-6.8878e-02, 9.0436e-02, 1.3854e-01],
[ 2.7417e-02, 1.3739e-01, 1.2296e-01],
[ 3.6777e-02, 8.2811e-02, 1.7315e-01]],
[[ 1.0117e-01, -8.6797e-02, -1.0844e-01],
[ 3.6410e-02, -5.5259e-02, -1.2783e-01],
[ 6.8452e-02, -4.5180e-02, -2.1184e-01]],
[[-1.2684e-01, -4.7279e-01, -1.7320e-01],
[ 1.7908e-03, -4.6264e-01, -1.3954e-01],
[ 1.9788e-01, -2.3086e-01, -8.0476e-02]]],
[[[-5.7285e-02, -3.1623e-02, -5.0206e-02],
[-4.3511e-02, -1.0759e-01, -5.7251e-02],
[-1.4999e-02, -5.1110e-03, -1.1587e-01]],
[[-5.4317e-02, 8.3997e-03, -2.4669e-02],
[-1.1558e-02, 1.5808e-02, -7.1731e-02],
[-9.4891e-02, -8.9794e-02, 3.7829e-02]],
[[-1.0093e-01, -3.4401e-02, -3.1227e-02],
[-7.7409e-02, 7.6845e-02, 1.6562e-02],
[ 9.6722e-02, 2.5094e-01, 2.5645e-01]],
...,
[[ 5.2109e-02, 1.3042e-02, -1.9811e-02],
[ 9.1746e-02, -1.7295e-01, 1.3157e-01],
[ 1.9183e-02, -3.1044e-02, 2.2009e-01]],
[[-1.5274e-01, -1.1558e-01, -2.4752e-02],
[-1.2289e-01, 7.8796e-03, -1.0985e-01],
[ 7.5975e-02, 3.0561e-02, 1.3888e-02]],
[[-6.6256e-02, -4.9052e-02, -3.8315e-02],
[-2.8864e-01, -3.9537e-01, -2.0418e-01],
[ 6.7839e-03, 1.0331e-01, -8.2940e-02]]]], device='cuda:0')),
('conv_layers.5.bias',
tensor([-0.0249, 0.1737, -0.0636, -0.0201, -0.2582, -0.0141, 0.1144, 0.0578,
0.0338, -0.1267, 0.0956, -0.0607, 0.2544, 0.0455, 0.0115, 0.1019,
0.0484, 0.0660, -0.0265, 0.1865, 0.2336, 0.1356, 0.0379, 0.0649,
0.1112, 0.2249, -0.0384, 0.1601, 0.0985, 0.1094, 0.1239, 0.0426],
device='cuda:0')),
('conv_layers.7.weight',
tensor([[[[ 0.0241, -0.1215, -0.2081],
[-0.0864, -0.1306, -0.0113],
[-0.0927, 0.0309, 0.0870]],
[[ 0.0072, 0.1436, 0.1865],
[ 0.1886, 0.2346, 0.1240],
[ 0.2580, 0.0799, -0.0557]],
[[-0.1047, -0.0675, -0.1257],
[-0.1101, -0.1813, -0.0410],
[ 0.0938, 0.0740, 0.1281]],
...,
[[ 0.0206, -0.3242, -0.2118],
[-0.2351, -0.4272, -0.0930],
[-0.1831, -0.1660, 0.1271]],
[[-0.1919, 0.0759, 0.1194],
[-0.0524, 0.0242, 0.0965],
[ 0.1550, 0.1006, -0.0622]],
[[ 0.0421, 0.0818, 0.0691],
[ 0.0649, -0.0488, 0.0582],
[-0.1067, -0.1252, 0.0196]]],
[[[-0.0995, 0.0359, 0.0542],
[ 0.0481, -0.0472, 0.0295],
[ 0.0724, 0.0867, 0.0938]],
[[-0.1130, -0.2027, 0.1714],
[-0.1914, -0.3251, 0.0652],
[-0.0337, -0.4884, 0.0372]],
[[-0.0128, -0.0447, -0.0927],
[-0.0247, -0.1932, -0.1069],
[ 0.0053, -0.0098, -0.0368]],
...,
[[ 0.0623, -0.0297, -0.0203],
[-0.0768, 0.0517, 0.0688],
[-0.0684, -0.0192, 0.0381]],
[[-0.1181, -0.0607, 0.0634],
[ 0.0477, -0.1014, 0.0029],
[ 0.1788, -0.2608, 0.0581]],
[[ 0.0263, -0.0838, 0.0220],
[-0.0322, 0.0565, -0.1151],
[ 0.1534, 0.1762, 0.1053]]],
[[[-0.0358, -0.0405, -0.0563],
[-0.0661, -0.0943, -0.0016],
[ 0.0074, -0.1607, -0.1671]],
[[-0.0196, -0.0123, -0.0818],
[ 0.0913, 0.0455, -0.0139],
[ 0.0933, 0.1251, 0.0795]],
[[ 0.0217, -0.0614, -0.1666],
[ 0.0816, -0.0822, -0.1156],
[ 0.1104, -0.0535, -0.1336]],
...,
[[ 0.0937, -0.0332, -0.0067],
[-0.0074, -0.0270, -0.0557],
[-0.1262, -0.1062, 0.0034]],
[[ 0.0753, 0.0686, -0.0654],
[ 0.0410, 0.0572, 0.0344],
[ 0.1562, 0.0708, 0.0243]],
[[-0.0399, -0.1548, -0.1140],
[ 0.0192, -0.0800, -0.0751],
[-0.0235, -0.1278, -0.0220]]],
...,
[[[ 0.0962, -0.0465, 0.1298],
[-0.0794, -0.1129, 0.0965],
[-0.0904, -0.0241, 0.1628]],
[[-0.1482, 0.0922, 0.1346],
[-0.1679, 0.0288, 0.0879],
[-0.2215, -0.0437, -0.1290]],
[[-0.0846, 0.0842, 0.0725],
[-0.0553, -0.0171, -0.0433],
[-0.0468, -0.0264, -0.0407]],
...,
[[-0.1422, -0.1844, -0.2878],
[-0.0867, -0.0745, 0.0944],
[-0.1478, -0.0455, 0.0799]],
[[-0.0601, 0.0437, 0.1724],
[-0.1472, -0.0308, 0.0125],
[-0.0970, -0.0138, -0.1092]],
[[-0.0364, -0.0633, 0.1463],
[-0.0813, 0.0159, -0.0073],
[ 0.1851, 0.0935, 0.1180]]],
[[[-0.0154, -0.0886, -0.1035],
[-0.0349, -0.0609, -0.0642],
[-0.0101, 0.0070, -0.0053]],
[[ 0.0043, -0.0344, -0.0724],
[-0.0906, -0.0239, 0.0112],
[-0.0865, -0.0421, 0.0025]],
[[-0.0638, -0.0040, 0.0339],
[-0.1015, -0.0984, -0.0027],
[ 0.0231, 0.0138, 0.0593]],
...,
[[ 0.0454, 0.0647, -0.0056],
[-0.0526, -0.0320, -0.0199],
[ 0.0151, 0.0043, -0.0181]],
[[-0.0804, -0.0251, 0.0457],
[-0.0561, -0.0381, 0.0255],
[-0.0421, -0.0872, -0.0092]],
[[ 0.0298, -0.0746, -0.0790],
[ 0.0307, -0.0363, -0.0672],
[-0.0101, 0.0376, -0.0599]]],
[[[-0.0377, -0.0396, -0.1259],
[ 0.0781, -0.0029, -0.1612],
[ 0.0594, -0.0329, -0.1587]],
[[ 0.0128, 0.0795, -0.0350],
[-0.1660, -0.2690, -0.1860],
[-0.0464, -0.1308, -0.0191]],
[[ 0.1465, 0.0821, -0.0974],
[ 0.0278, 0.0184, -0.0597],
[-0.0196, -0.1285, -0.2200]],
...,
[[ 0.0576, -0.2414, -0.3014],
[ 0.1304, 0.1097, -0.0236],
[ 0.1730, 0.1249, 0.2552]],
[[ 0.0421, -0.0053, 0.0198],
[-0.0035, -0.1439, -0.1509],
[-0.1014, -0.1892, -0.1510]],
[[-0.0206, -0.0153, -0.0861],
[-0.0697, -0.1129, -0.0375],
[-0.1230, -0.2854, -0.2531]]]], device='cuda:0')),
('conv_layers.7.bias',
tensor([ 0.1719, 0.2134, 0.1217, -0.0504, -0.0894, -0.0136, -0.1375, -0.0436,
0.1455, -0.0661, -0.1127, -0.0547, -0.0704, -0.0697, -0.0494, 0.0672,
0.1702, -0.0260, -0.1079, 0.1347, -0.0488, -0.0987, 0.0760, 0.0786,
-0.0738, -0.0683, -0.0867, 0.1110, 0.1146, 0.1142, -0.0705, 0.0338],
device='cuda:0')),
('conv_layers.10.weight',
tensor([[[[-1.8611e-01, -2.1173e-01, -1.5476e-01],
[-7.5681e-02, -1.3217e-01, -3.8437e-02],
[ 3.2172e-02, -1.6116e-01, 3.2026e-02]],
[[-4.7857e-02, -8.1274e-02, -2.1818e-03],
[ 4.1314e-02, 5.0682e-02, 4.6977e-02],
[ 9.4126e-03, 9.4538e-02, 1.0177e-01]],
[[-1.2752e-01, -1.1788e-01, -1.0585e-01],
[ 2.1040e-01, 1.3633e-01, 8.5255e-02],
[ 1.3185e-01, 1.4489e-01, 1.7348e-02]],
...,
[[ 4.9718e-02, -1.2167e-02, -2.8080e-02],
[-9.2502e-02, -8.9364e-02, 6.3864e-02],
[-6.2357e-02, 3.4540e-02, 6.3643e-02]],
[[ 4.4047e-02, -3.8921e-02, 5.2757e-02],
[-4.1809e-02, -4.2952e-02, 1.1548e-02],
[-4.9424e-03, -3.3193e-02, 4.8293e-02]],
[[ 7.2649e-02, 1.0994e-01, 2.3409e-03],
[-2.3719e-01, -1.1668e-01, -1.6286e-01],
[-6.9609e-02, -1.5684e-02, 7.4860e-02]]],
[[[ 1.3767e-01, 1.0384e-01, 1.9359e-01],
[-4.0937e-02, 6.5504e-02, 8.0934e-02],
[-1.6220e-01, -6.1308e-02, -1.0281e-01]],
[[ 1.0757e-01, 3.9658e-02, -3.4049e-02],
[-1.0423e-01, -1.9457e-01, -2.3487e-01],
[-8.1962e-02, -2.8739e-01, -1.2399e-01]],
[[ 7.5705e-02, 2.0086e-02, 2.1522e-02],
[-3.1026e-02, 8.9527e-03, 7.5131e-02],
[ 2.9353e-02, 8.0005e-02, 1.5865e-01]],
...,
[[-1.0184e-01, 1.1209e-01, 1.2860e-01],
[-1.6567e-01, 1.2423e-01, 1.4798e-01],
[-3.1706e-02, 1.5217e-01, 1.2573e-01]],
[[ 8.8367e-02, -2.8921e-02, -8.0392e-02],
[ 7.7044e-02, -6.5965e-02, 4.1848e-03],
[-5.0832e-02, 4.8358e-02, -1.6264e-02]],
[[-1.9532e-02, -4.7255e-02, -8.4465e-02],
[-1.4843e-02, 4.6111e-02, -7.2080e-02],
[ 8.1802e-02, 5.9537e-02, -1.0310e-02]]],
[[[-1.8722e-02, -5.0539e-02, -4.8648e-02],
[ 4.1284e-02, 2.3904e-02, 5.7577e-02],
[ 7.1015e-02, 1.4079e-02, -3.5972e-02]],
[[-1.3413e-01, 5.7188e-02, 3.1780e-02],
[-1.3052e-01, 2.0846e-02, 6.2837e-02],
[-1.4650e-01, 7.7300e-02, 3.0812e-02]],
[[-5.1147e-02, -1.0347e-01, -1.0505e-01],
[ 5.1666e-02, 5.8274e-03, -1.4855e-01],
[ 1.4660e-02, -1.0005e-01, -1.0585e-01]],
...,
[[ 8.2734e-02, 2.4406e-02, -1.5703e-01],
[ 9.0380e-02, -4.2940e-02, -4.3065e-02],
[ 6.4142e-02, 3.2078e-02, -8.1099e-02]],
[[-9.9700e-02, -9.5451e-02, -2.3508e-02],
[-1.1808e-01, -2.8663e-02, -4.3995e-02],
[-8.5427e-02, 4.3678e-02, 1.4992e-02]],
[[-3.4040e-02, -2.2161e-02, 8.3918e-02],
[-1.9609e-02, -2.2842e-01, -9.6255e-02],
[ 1.2915e-01, -2.8437e-01, -2.2004e-01]]],
...,
[[[-6.9516e-02, -6.3188e-02, -5.6546e-02],
[ 1.3898e-02, -1.6235e-02, 2.1150e-02],
[ 2.0829e-02, -2.3653e-02, -3.2574e-03]],
[[-3.7453e-03, -8.6889e-03, -6.0923e-02],
[-5.6433e-02, -7.7812e-02, -1.2687e-01],
[-6.3533e-02, -5.4503e-02, -9.3785e-02]],
[[ 9.6634e-03, -2.1584e-02, -5.7851e-02],
[-2.2441e-02, -6.1729e-02, 1.1836e-02],
[-6.6286e-02, -5.7016e-02, -1.3706e-02]],
...,
[[-6.2189e-02, -7.3086e-02, -3.7296e-02],
[-7.0725e-02, -5.0949e-02, 9.4950e-04],
[ 2.5471e-02, -3.2089e-02, 1.1622e-02]],
[[ 1.0202e-02, 4.8953e-02, 5.5042e-02],
[ 1.7454e-02, -4.7641e-02, 4.4791e-03],
[ 5.5161e-02, 2.5645e-02, -1.1121e-02]],
[[ 2.5005e-02, -2.0402e-02, -3.6594e-02],
[-9.4508e-02, 2.2413e-02, -4.2938e-02],
[ 4.6113e-03, -8.9161e-02, -2.5934e-02]]],
[[[-1.9426e-02, 1.4834e-02, 5.1117e-02],
[ 3.1577e-02, 4.3598e-02, 1.0243e-02],
[-5.1230e-03, 8.1176e-02, -2.3391e-03]],
[[ 5.2194e-02, -9.4869e-02, 8.8236e-02],
[-2.6021e-02, -8.3654e-02, 1.4056e-02],
[-6.1823e-02, -7.9130e-02, -1.5485e-01]],
[[-6.7073e-02, -1.2388e-01, -1.9105e-02],
[-6.8379e-02, -8.8586e-02, -4.1003e-03],
[-5.7345e-02, -1.3278e-01, -1.4111e-01]],
...,
[[-1.9625e-02, 2.0204e-02, 7.7494e-02],
[-6.0224e-02, -8.5226e-02, -7.3394e-02],
[-8.7015e-02, -3.2824e-01, -1.0607e-01]],
[[-4.8195e-02, 9.5470e-02, -4.3475e-02],
[-7.2721e-02, 3.9199e-02, -1.8761e-02],
[ 1.7869e-02, -1.0342e-02, -3.2887e-02]],
[[-4.9715e-02, -1.2840e-01, -1.1701e-01],
[ 1.8608e-02, -1.3257e-01, -1.0345e-01],
[ 1.6203e-02, -6.2980e-02, 1.3167e-01]]],
[[[ 8.7513e-02, 7.3278e-02, -9.9796e-02],
[ 1.2896e-01, 1.4048e-01, -7.2620e-02],
[-1.1709e-02, 5.3219e-02, -2.2291e-02]],
[[-1.4933e-01, -1.8216e-01, 1.1946e-01],
[-5.3395e-02, -6.9660e-02, 4.6785e-02],
[ 1.6441e-01, 6.7216e-02, -1.3346e-01]],
[[ 3.5386e-03, -1.2649e-01, -4.9213e-02],
[-4.2206e-02, -1.6453e-01, -1.1806e-01],
[ 2.7839e-02, 1.3378e-02, 3.3605e-02]],
...,
[[-2.6466e-02, -8.8836e-03, 2.3551e-02],
[ 4.6966e-03, 1.1885e-01, -2.2808e-02],
[-1.2835e-01, 1.2063e-02, 3.0082e-02]],
[[-1.7873e-02, -8.4855e-03, -6.1442e-02],
[-2.2839e-02, -5.8446e-02, 1.3025e-04],
[-2.4610e-02, -5.4720e-02, 4.5768e-02]],
[[ 6.3420e-02, -1.2695e-01, -2.0834e-02],
[-4.5936e-02, -1.5325e-03, 2.2972e-02],
[-2.1640e-01, -2.3646e-02, 1.2209e-01]]]], device='cuda:0')),
('conv_layers.10.bias',
tensor([-0.0922, 0.0534, 0.0289, 0.1075, 0.2093, -0.0012, 0.1622, 0.1248,
0.0126, 0.2343, -0.0874, 0.0869, 0.1451, 0.2513, 0.1634, -0.0985,
0.2725, 0.0588, -0.0896, 0.0940, -0.0732, 0.0189, 0.1568, -0.0522,
0.0527, 0.0586, -0.0792, 0.0195, 0.0236, -0.0458, -0.0434, 0.0668],
device='cuda:0')),
('conv_layers.12.weight',
tensor([[[[-7.4800e-02, -6.2504e-02, 1.6335e-02],
[-5.9156e-02, 1.1278e-03, 7.9337e-02],
[ 6.3767e-02, -1.8542e-01, -1.2867e-01]],
[[-2.0163e-01, -2.8119e-01, -1.7003e-01],
[-1.6916e-01, -1.7438e-01, -1.4727e-01],
[ 9.9132e-02, -4.3207e-02, -2.0744e-01]],
[[ 5.9460e-02, 1.0858e-01, 1.6151e-02],
[ 4.7777e-02, 1.3922e-02, -6.4595e-02],
[ 7.5647e-02, 7.4751e-02, -2.9223e-02]],
...,
[[-8.7460e-02, -7.7122e-02, -8.9945e-03],
[-4.5239e-02, 2.9736e-02, 2.8152e-02],
[-8.3227e-03, -1.4119e-03, -4.8275e-02]],
[[ 1.0793e-01, 6.8425e-02, 6.8645e-02],
[ 8.7855e-02, 4.8203e-02, 1.6358e-01],
[ 6.5637e-02, 5.7651e-02, 3.2776e-02]],
[[-1.4856e-02, -4.0976e-02, -2.3659e-01],
[-8.4363e-02, -3.5468e-02, -2.0746e-01],
[-3.9858e-02, -4.7083e-02, -2.3002e-01]]],
[[[ 4.8065e-02, 6.1314e-02, -8.8346e-02],
[-3.7266e-02, 1.0493e-01, 1.0154e-01],
[-8.7959e-02, 1.0529e-01, 8.2673e-02]],
[[ 5.7536e-03, 8.7972e-02, 1.7827e-01],
[ 1.3790e-01, 1.5982e-01, -4.1360e-02],
[ 1.1342e-01, 1.4375e-03, -1.9160e-01]],
[[-7.5166e-02, 2.2313e-02, 7.1587e-02],
[ 8.5163e-03, -1.8776e-02, 5.6605e-02],
[ 3.9201e-02, -6.3899e-02, 8.9856e-04]],
...,
[[-5.3547e-02, -1.5935e-02, -1.1539e-02],
[-6.3290e-02, -2.5561e-02, 8.0058e-03],
[-2.4900e-02, -2.5258e-02, 5.2170e-02]],
[[-4.5758e-02, -1.3636e-02, -1.4292e-01],
[-5.4119e-02, -1.5561e-01, -8.4891e-02],
[-1.2347e-01, -2.7400e-01, 4.4526e-02]],
[[ 7.7424e-02, 5.4465e-04, 6.4757e-02],
[ 9.0497e-02, -5.3297e-02, 8.2394e-02],
[ 7.5043e-02, 8.5600e-03, -3.6064e-02]]],
[[[ 6.6098e-02, 9.6645e-02, 4.2971e-02],
[-2.0045e-01, -1.9137e-01, -1.1074e-01],
[ 5.3231e-02, 3.4922e-02, -1.8295e-01]],
[[ 4.4160e-04, -1.6889e-02, -1.3277e-01],
[ 1.7191e-01, 7.3019e-02, -8.5435e-02],
[ 2.4872e-02, -2.2829e-01, -1.1931e-01]],
[[ 7.8230e-02, -3.1023e-02, -9.5386e-02],
[-3.3054e-02, -2.4654e-01, -2.6280e-01],
[ 3.4409e-02, 3.3377e-02, -6.4448e-03]],
...,
[[-1.0207e-02, -5.0111e-02, 6.7373e-03],
[ 4.0621e-02, 8.9900e-03, 1.7735e-02],
[ 6.6721e-02, 1.5469e-02, -8.5324e-04]],
[[-1.3796e-01, -8.1350e-02, 9.2203e-02],
[-1.3319e-01, -2.4167e-01, -7.5574e-02],
[-1.2477e-01, -2.8469e-01, -1.4836e-01]],
[[ 1.6326e-01, 1.0599e-01, -3.1075e-02],
[-5.3371e-02, -1.3316e-01, -7.0749e-02],
[ 1.1554e-01, -1.1235e-01, -9.1289e-02]]],
...,
[[[-1.9580e-01, -9.8003e-02, 5.9877e-02],
[ 1.3194e-01, 3.6985e-02, -2.9874e-02],
[-7.1550e-02, -5.4211e-02, -4.1304e-02]],
[[-9.0913e-02, -1.8834e-01, 1.3253e-01],
[-2.0102e-01, -2.4974e-01, -3.8285e-02],
[-2.8275e-01, -7.4311e-02, -1.9142e-02]],
[[ 1.6353e-01, 9.8634e-02, -3.9271e-02],
[ 1.0953e-01, 9.7190e-02, 2.6712e-02],
[ 8.9114e-02, 8.4085e-02, 7.2265e-02]],
...,
[[ 4.0429e-02, 1.7303e-02, -8.7029e-02],
[-7.1821e-02, -1.9547e-02, 6.2962e-03],
[ 7.3510e-03, 5.6395e-02, -1.7797e-02]],
[[ 4.3498e-02, -4.9444e-02, -1.9398e-01],
[ 7.3766e-02, -5.0676e-03, -6.7043e-02],
[-2.4738e-02, 5.2624e-02, -3.9303e-02]],
[[ 9.1393e-02, 7.0191e-02, -4.5413e-02],
[ 4.1654e-02, -4.3964e-02, -3.2744e-02],
[-5.7727e-02, -9.3479e-02, -6.8900e-02]]],
[[[-9.4722e-02, 7.4452e-02, 8.7302e-02],
[ 1.7391e-02, 5.2280e-02, 1.0621e-01],
[ 1.5096e-03, -1.8531e-02, 1.4543e-01]],
[[-9.5608e-02, 7.6295e-02, -6.5714e-02],
[ 3.6983e-02, 1.6992e-01, -1.2634e-01],
[ 9.5811e-02, 1.7099e-01, 1.4946e-01]],
[[ 3.0396e-02, -1.1898e-02, -2.1356e-02],
[-6.4896e-02, 4.1883e-02, 2.2500e-02],
[-1.0890e-01, -9.3153e-02, -7.6747e-03]],
...,
[[-1.0032e-02, 6.4271e-02, 1.9267e-02],
[-2.8996e-02, -9.7088e-03, 2.8652e-04],
[-3.6841e-02, 2.0053e-03, -2.6778e-03]],
[[-1.3578e-01, 3.6059e-02, 6.6287e-02],
[-1.4500e-01, -3.9346e-02, -3.6145e-02],
[-1.9345e-01, -2.3784e-01, -2.3243e-01]],
[[ 1.7452e-01, 2.2193e-01, 1.2567e-01],
[ 7.9766e-02, 1.5548e-01, 5.7584e-02],
[ 9.5316e-02, 1.6398e-01, -2.2004e-03]]],
[[[-7.3988e-02, -2.2090e-01, -8.3087e-02],
[ 2.4601e-01, 1.2784e-01, 6.7552e-02],
[ 1.3910e-02, -8.0143e-02, -2.2098e-01]],
[[-2.1299e-01, -3.1024e-01, -3.1580e-01],
[-9.0657e-02, 4.1846e-03, 8.6367e-04],
[ 1.3232e-01, 2.0885e-01, 1.3087e-01]],
[[ 1.7829e-01, -2.6924e-02, -8.3899e-02],
[-7.7894e-02, -2.0233e-01, 6.2550e-02],
[ 2.0440e-02, -1.7794e-01, 1.2444e-01]],
...,
[[-3.2911e-02, 7.9222e-03, 2.2436e-02],
[-2.3485e-02, 1.3725e-02, 1.0205e-02],
[ 1.6555e-02, -3.9097e-02, 3.6753e-02]],
[[ 3.3712e-03, -1.8883e-02, 3.0935e-02],
[ 1.2510e-01, -8.7799e-03, 1.6484e-01],
[ 8.7672e-02, 8.0008e-02, 1.6365e-01]],
[[-2.8808e-02, 7.7313e-02, 2.2094e-01],
[-1.8681e-02, -1.5246e-01, -9.3219e-04],
[-1.3110e-03, -3.6371e-03, 2.0578e-01]]]], device='cuda:0')),
('conv_layers.12.bias',
tensor([ 0.1844, -0.1336, 0.1648, -0.0118, 0.0866, 0.2561, -0.1972, 0.0925,
-0.0814, 0.1959, 0.1269, 0.1412, 0.0193, 0.0856, 0.0446, 0.0225,
0.2130, 0.1530, 0.0409, 0.1774, 0.1196, 0.1906, -0.0442, 0.1012,
0.0078, 0.0605, -0.0543, 0.1195, 0.1923, 0.1627, -0.0435, -0.0482],
device='cuda:0')),
('fc_layers.0.weight',
tensor([[-0.0328, -0.0318, -0.0403, ..., 0.0166, 0.0174, -0.0304],
[ 0.0270, -0.0008, 0.1407, ..., -0.2989, -0.1349, 0.0570],
[-0.0009, 0.0270, 0.0233, ..., -0.0208, -0.0518, -0.0076],
...,
[ 0.0117, 0.0251, -0.1356, ..., -0.1717, -0.2447, 0.0356],
[-0.0091, -0.0273, -0.0214, ..., -0.0064, 0.0031, 0.0260],
[-0.0023, -0.0147, -0.0594, ..., -0.0614, -0.0418, 0.0349]],
device='cuda:0')),
('fc_layers.0.bias',
tensor([-1.6239e-02, 1.0923e-01, -2.8516e-03, -3.4562e-02, -3.8935e-02,
2.6838e-01, -9.5792e-03, -4.7951e-02, -5.0420e-02, -4.7779e-02,
1.5300e-01, 2.0850e-01, 1.0595e-01, -2.5888e-02, 3.4631e-02,
-6.9657e-02, -3.8805e-02, -4.6439e-02, 1.1409e-01, -5.9578e-02,
-3.4340e-02, 8.2158e-02, 1.6506e-03, 6.1588e-02, 2.1332e-02,
2.5505e-02, -3.2998e-02, -2.4146e-02, -4.0811e-02, 3.4497e-02,
-2.4349e-02, -8.3809e-03, -3.6324e-02, -3.3375e-02, 1.5518e-01,
-2.2934e-02, 3.0315e-02, -1.9900e-01, 9.1135e-02, 1.3891e-03,
-1.8285e-02, -6.4289e-02, -1.7493e-02, -2.7214e-02, -5.3926e-02,
-8.5462e-02, -1.7204e-02, -1.1632e-02, 3.3002e-01, 1.6031e-01,
4.9596e-02, 3.9052e-02, 4.9962e-02, 2.7786e-01, -7.1928e-03,
5.5197e-02, 1.0226e-01, 3.7699e-02, -7.6467e-02, -1.3759e-01,
1.4240e-01, -5.7557e-02, 5.1371e-03, 2.2396e-01, -1.4112e-02,
-2.5035e-02, -4.4538e-02, -6.3499e-03, 1.5337e-01, -5.8866e-02,
1.1374e-02, -7.1386e-02, 6.7330e-03, 8.5862e-02, 1.5886e-02,
1.0027e-01, -8.5168e-02, -2.7267e-02, 2.0361e-04, -6.5557e-02,
-8.7466e-02, -1.3498e-02, -2.5257e-02, -1.7296e-02, -7.9615e-02,
-4.3327e-02, -5.0456e-02, -1.4147e-02, 1.9972e-02, -2.1920e-02,
-6.2179e-02, -6.1604e-02, 1.4420e-01, -1.2878e-02, -6.5223e-02,
1.5523e-01, 1.1567e-01, -5.3510e-02, -4.5581e-02, -4.8011e-02,
-5.6728e-02, -1.9497e-02, 1.5061e-01, -3.4220e-02, 1.0898e-01,
1.3276e-03, 3.1907e-03, 3.1949e-02, 7.7712e-02, 1.0700e-01,
1.1075e-01, 2.6997e-02, -5.2971e-02, -7.2003e-02, -9.1618e-02,
-4.4794e-02, -3.4451e-02, -2.1062e-02, 1.8828e-01, -1.0083e-01,
-3.9372e-02, 1.7090e-01, 2.2109e-01, 1.0274e-01, -2.0732e-03,
-7.0506e-02, -4.7738e-02, 6.6988e-04, -1.0072e-01, 3.7746e-03,
-7.1554e-02, 7.9473e-02, -6.2979e-03, 2.1948e-02, -6.7200e-02,
-7.4002e-02, 3.1581e-02, 5.9462e-02, 1.1358e-02, 2.0694e-01,
8.3849e-03, -6.1303e-02, -5.1582e-02, 1.0200e-01, -3.3679e-02,
-4.2478e-02, 9.2998e-02, 1.6232e-01, 1.0466e-01, -5.5543e-02,
1.2001e-02, -1.0628e-01, 7.2968e-02, -7.3252e-02, 5.4807e-03,
1.3956e-01, -1.2730e-02, -4.1100e-03, 1.2055e-01, -3.8652e-02,
4.5427e-02, -2.5458e-02, -5.9116e-02, -7.6093e-02, -9.5814e-02,
3.0348e-01, -4.5076e-02, 1.8271e-02, -6.5136e-02, -2.8736e-03,
2.2027e-01, 9.8514e-02, -9.4142e-02, -1.2537e-02, 1.0526e-01,
-6.6977e-02, -4.5705e-02, -1.2813e-02, -5.3912e-02, 1.6488e-02,
-1.5103e-02, -1.2798e-02, 7.5066e-02, -5.9629e-02, -5.8831e-02,
-3.2580e-03, 1.7053e-02, 4.9883e-02, 4.8562e-02, -1.1022e-02,
-4.2881e-02, 2.5595e-02, -7.2761e-02, -3.7728e-02, 3.3112e-01,
-3.1459e-02, 2.4509e-01, 3.7849e-02, -8.2555e-02, -1.9617e-02,
-5.8830e-02, -8.2432e-02, 1.3855e-01, -5.6709e-02, -6.5949e-02,
-2.5247e-02, -1.7518e-01, -1.9569e-03, -8.8462e-03, -6.6194e-02,
-2.5738e-02, 6.7898e-02, -6.7083e-02, 1.5637e-01, 1.1019e-01,
4.5845e-02, -2.3574e-02, -6.7694e-02, 1.7070e-02, -1.6205e-01,
-4.1495e-02, -3.5686e-02, -5.4307e-02, -8.2441e-02, -6.7833e-02,
-1.8962e-02, 2.2435e-02, 8.8004e-02, 1.3901e-02, 1.0242e-01,
1.3243e-01, 7.3199e-02, -8.9502e-02, 7.5639e-03, -4.5814e-02,
-4.1167e-02, 4.7797e-02, -5.1378e-02, 1.0509e-01, 3.0008e-02,
-5.2779e-02, -8.5182e-02, 3.0903e-03, -2.9993e-02, -6.4896e-02,
-3.8243e-02, -7.9818e-02, -1.5021e-02, 1.1704e-01, -3.1683e-02,
-2.3707e-02, 1.5499e-01, 3.8685e-03, 5.9336e-03, -5.5619e-02,
-4.0461e-02], device='cuda:0')),
('fc_layers.2.weight',
tensor([[-0.0347, -0.1325, 0.0408, ..., -0.0324, 0.0068, -0.0031],
[-0.0190, 0.0726, -0.0323, ..., 0.0295, 0.0149, 0.0311],
[-0.0353, -0.0447, 0.0433, ..., -0.0516, -0.0832, -0.0358],
...,
[-0.0651, -0.0219, -0.0540, ..., 0.0305, -0.0618, -0.0192],
[-0.0234, -0.1443, 0.0237, ..., -0.1868, -0.0264, -0.0333],
[ 0.0321, 0.0257, -0.0473, ..., -0.0086, -0.0177, 0.1005]],
device='cuda:0')),
('fc_layers.2.bias',
tensor([-0.0834, -0.0677, 0.1544, 0.0944, 0.1253, -0.0005, -0.1302, -0.1930,
-0.0351, -0.1008], device='cuda:0'))])},
{'ratio': 0.58,
'bias': 32,
'train_losses': [286.1557893690638,
261.5400017539452,
194.96539259872304,
174.08391483770407,
163.10553415574745,
157.33185192219665,
152.3981426080484,
149.51641716965415,
146.9264845001344,
143.4521388148346,
142.37324153440784,
140.39622470928944,
138.74513549403162,
138.29618876652358,
137.64099336648277],
'test_losses': [283.9790431938919,
224.15436995263192,
181.02008390426636,
167.93521857729147,
154.85402447569604,
150.23339782976637,
147.1227441628774,
148.2921481880487,
143.5350534962673,
139.43081399506212,
139.5185690019645,
133.92849222351524,
135.24419937881768,
134.07853160184973,
134.1070124682258],
'model_state_dict': OrderedDict([('conv_layers.0.weight',
tensor([[[[ 0.0744, 0.2526, 0.1069],
[-0.1976, 0.1165, -0.0101],
[-0.0661, -0.1788, -0.0865]],
[[ 0.0695, 0.3105, 0.0375],
[-0.1336, 0.0459, 0.1914],
[-0.1500, -0.0669, -0.2780]],
[[ 0.1485, 0.1128, -0.0180],
[-0.2006, 0.1971, -0.1733],
[-0.0569, -0.1167, 0.0830]]],
[[[ 0.1165, 0.1278, 0.1424],
[ 0.0960, -0.0669, 0.1793],
[-0.0639, -0.2145, -0.1874]],
[[-0.0388, 0.0143, 0.0094],
[ 0.0861, 0.0044, -0.1843],
[-0.2424, 0.0184, -0.1882]],
[[ 0.0487, 0.1686, 0.0898],
[-0.0722, -0.2101, 0.0749],
[-0.0349, 0.1047, 0.1731]]],
[[[-0.0055, -0.1349, 0.0771],
[-0.1862, 0.0733, 0.0739],
[-0.1122, 0.0715, 0.1898]],
[[-0.2629, -0.1360, 0.2173],
[-0.0620, 0.1245, -0.0106],
[-0.1921, -0.1404, 0.2594]],
[[-0.1012, 0.0986, 0.1050],
[ 0.1085, -0.1839, 0.1516],
[-0.1006, 0.1402, -0.0411]]],
[[[ 0.1225, 0.0551, 0.0381],
[ 0.3132, -0.0254, -0.0155],
[-0.1027, -0.1574, -0.1938]],
[[ 0.2369, 0.2013, 0.0944],
[ 0.0473, 0.0552, -0.2249],
[-0.0295, -0.3324, -0.2955]],
[[ 0.0361, -0.1011, 0.1245],
[ 0.1248, 0.1860, 0.0844],
[-0.0039, -0.1354, -0.0603]]],
[[[-0.0802, 0.1513, -0.0598],
[-0.1379, 0.1300, -0.0652],
[ 0.0340, -0.0107, -0.0234]],
[[-0.0166, -0.1374, -0.1697],
[-0.1535, -0.1265, -0.1503],
[-0.0350, 0.1396, 0.1127]],
[[ 0.0850, -0.0801, 0.0740],
[ 0.0847, -0.0280, 0.0536],
[-0.0122, 0.1257, 0.0119]]],
[[[ 0.0090, -0.2928, -0.2239],
[ 0.1726, -0.0678, -0.0482],
[-0.0423, 0.2755, 0.2275]],
[[ 0.0234, -0.2294, -0.2724],
[ 0.1172, -0.1903, -0.0670],
[ 0.1318, 0.2053, 0.1485]],
[[-0.0854, -0.1245, -0.0111],
[-0.0307, 0.0956, 0.1822],
[ 0.0741, 0.0639, 0.0186]]],
[[[-0.0549, 0.0186, -0.1858],
[ 0.2392, 0.0247, 0.0772],
[ 0.1695, -0.0495, -0.2027]],
[[ 0.1326, -0.1357, -0.2132],
[ 0.2695, 0.0660, -0.1857],
[ 0.0763, -0.0487, 0.0126]],
[[ 0.1712, -0.1145, -0.1366],
[-0.0647, -0.0049, -0.0475],
[ 0.2342, 0.0033, -0.0782]]],
[[[-0.0968, -0.2583, 0.0600],
[-0.2019, 0.1412, 0.1502],
[ 0.1991, 0.2397, 0.2277]],
[[-0.1468, 0.0082, 0.2460],
[-0.0028, -0.1098, -0.0341],
[ 0.1544, -0.1374, -0.0306]],
[[ 0.0023, 0.0081, 0.0300],
[-0.0061, -0.0999, 0.0784],
[-0.0562, -0.2509, 0.0435]]]], device='cuda:0')),
('conv_layers.0.bias',
tensor([ 0.2593, -0.2966, 0.2244, 0.2068, -0.1695, 0.2259, 0.1979, -0.4707],
device='cuda:0')),
('conv_layers.2.weight',
tensor([[[[-0.0746, -0.0177, -0.0064],
[-0.2118, -0.3655, -0.1038],
[-0.1415, -0.0710, 0.0508]],
[[ 0.0213, 0.0179, -0.0377],
[ 0.0621, -0.0045, 0.0487],
[ 0.2661, 0.3147, 0.2055]],
[[ 0.1097, 0.0263, 0.1104],
[-0.0917, 0.1363, -0.0432],
[ 0.0278, 0.0454, 0.0704]],
...,
[[-0.0249, -0.0208, 0.1251],
[-0.0440, 0.1017, 0.0323],
[ 0.1780, 0.1813, 0.0306]],
[[-0.0575, -0.0899, 0.0442],
[-0.0782, 0.0102, 0.0466],
[ 0.1034, -0.0334, 0.2671]],
[[-0.0412, 0.0966, 0.1446],
[-0.0241, 0.0338, 0.2628],
[-0.0087, -0.0246, 0.0296]]],
[[[ 0.0305, -0.0795, 0.0543],
[ 0.0231, -0.0659, 0.0424],
[-0.1049, -0.0799, 0.1403]],
[[ 0.1152, -0.0912, 0.1571],
[ 0.0595, 0.0034, -0.0179],
[-0.0748, -0.1589, -0.1121]],
[[ 0.0342, -0.0244, 0.0696],
[-0.1116, 0.1781, 0.1387],
[ 0.0546, 0.1259, 0.1131]],
...,
[[-0.0181, 0.1951, -0.0328],
[ 0.0820, 0.0954, 0.0091],
[-0.0101, 0.1879, -0.1682]],
[[ 0.0517, 0.0939, -0.1983],
[ 0.1272, -0.1418, -0.1922],
[-0.0354, 0.0097, 0.0541]],
[[ 0.0201, 0.0038, 0.0143],
[-0.0075, -0.0509, -0.0608],
[-0.1632, 0.1483, 0.1297]]],
[[[-0.0968, -0.0744, 0.0237],
[ 0.0360, -0.0577, -0.1057],
[-0.1119, 0.0083, -0.1583]],
[[-0.0698, -0.0934, 0.0480],
[ 0.0248, 0.0670, -0.1069],
[-0.0132, -0.0651, -0.0106]],
[[-0.2840, -0.0316, 0.1608],
[-0.2669, 0.0082, 0.1388],
[-0.2264, -0.0369, 0.1990]],
...,
[[ 0.0169, 0.0646, -0.0528],
[-0.0965, 0.0157, -0.0035],
[ 0.1145, 0.1754, 0.1096]],
[[ 0.1574, 0.0470, -0.0668],
[ 0.0464, 0.0524, -0.0873],
[-0.0633, -0.0504, -0.0559]],
[[-0.3887, -0.4271, -0.3747],
[-0.3126, -0.2094, -0.2286],
[-0.1547, -0.1383, -0.2435]]],
...,
[[[-0.1062, -0.2394, -0.0176],
[-0.2538, -0.0660, 0.1085],
[-0.0810, -0.1741, 0.0014]],
[[-0.2252, -0.1851, -0.0207],
[-0.1874, -0.0772, 0.0177],
[ 0.0131, 0.1221, 0.0833]],
[[ 0.1535, -0.0533, 0.1313],
[ 0.1314, 0.0434, -0.0387],
[ 0.1594, 0.0624, 0.0351]],
...,
[[-0.0378, 0.0199, 0.1043],
[-0.0079, -0.0527, -0.0823],
[ 0.1305, -0.1079, -0.0897]],
[[ 0.0374, -0.1339, -0.2032],
[ 0.0359, -0.0744, -0.1330],
[-0.0348, -0.1752, 0.1304]],
[[-0.1522, -0.0831, 0.0257],
[-0.0901, -0.1155, -0.1171],
[-0.0171, -0.0564, 0.1615]]],
[[[-0.1037, -0.1653, -0.1453],
[ 0.0337, 0.0710, -0.0270],
[ 0.0826, 0.1945, 0.1508]],
[[-0.1650, -0.2237, 0.0097],
[-0.0885, -0.0479, -0.0334],
[-0.1004, -0.0725, 0.0699]],
[[ 0.0878, 0.0042, 0.0054],
[ 0.0361, 0.0964, -0.1664],
[ 0.2454, 0.0026, 0.0362]],
...,
[[-0.1077, -0.0062, 0.0775],
[-0.1255, -0.0193, 0.2377],
[-0.0409, -0.1572, 0.0143]],
[[-0.2733, 0.0508, 0.0056],
[-0.2870, -0.0361, 0.2025],
[-0.3325, 0.0875, 0.0244]],
[[-0.0603, -0.0209, 0.1861],
[-0.0588, -0.0449, 0.1824],
[-0.0881, -0.0936, 0.0785]]],
[[[ 0.1222, 0.0968, 0.2196],
[ 0.0217, 0.1330, 0.0984],
[-0.0203, 0.0026, -0.0465]],
[[-0.1142, -0.0623, -0.0792],
[-0.0683, -0.1253, -0.0187],
[-0.1062, 0.0663, 0.0275]],
[[-0.0151, -0.0791, 0.0613],
[-0.0759, -0.0772, 0.0292],
[ 0.1066, 0.0451, -0.0723]],
...,
[[-0.2264, -0.2284, -0.1295],
[-0.2167, -0.1615, -0.2574],
[-0.0198, -0.0347, 0.0266]],
[[-0.1009, -0.1169, -0.0286],
[-0.1254, 0.0700, -0.0318],
[-0.0837, -0.0531, -0.0061]],
[[ 0.1313, 0.1989, 0.0930],
[-0.1139, 0.0303, -0.0755],
[-0.2623, -0.1795, -0.2023]]]], device='cuda:0')),
('conv_layers.2.bias',
tensor([-0.3212, -0.3471, -0.1049, 0.2132, 0.1312, -0.1551, 0.0818, 0.1512,
0.1025, 0.1943, -0.2473, -0.2936, 0.2273, 0.2374, 0.3064, 0.0912],
device='cuda:0')),
('conv_layers.5.weight',
tensor([[[[ 5.7345e-02, 6.3154e-02, 6.9560e-02],
[-3.1391e-02, 2.0279e-02, 6.7393e-02],
[ 2.6087e-02, -8.3343e-02, 1.7794e-02]],
[[ 2.0827e-02, -7.2909e-02, 1.9064e-04],
[-4.3177e-02, -1.0790e-01, -1.1005e-03],
[-2.7564e-02, -5.9114e-02, -1.1330e-01]],
[[ 5.5357e-02, -8.8008e-03, 2.7416e-02],
[-2.8840e-02, -1.2764e-01, -8.1998e-02],
[ 4.3556e-02, 2.6671e-02, -3.8318e-02]],
...,
[[ 8.9874e-03, 1.1354e-02, -3.3328e-03],
[ 1.7153e-02, -4.3853e-02, -9.8573e-02],
[-6.7723e-02, -3.7680e-02, -6.6921e-02]],
[[-3.8708e-02, -1.1732e-01, -1.5711e-01],
[-1.0504e-02, -1.0072e-01, -1.8373e-01],
[-8.8156e-02, -7.3903e-02, -3.3800e-02]],
[[-1.6958e-01, -1.8381e-01, -1.8844e-01],
[-1.8217e-01, -6.8709e-02, -5.4344e-02],
[-1.5569e-01, -1.1182e-01, -9.8213e-02]]],
[[[-8.3367e-02, -1.8301e-02, -1.2379e-01],
[-5.9966e-02, 3.5735e-02, -7.0751e-02],
[ 2.9652e-02, 3.5194e-02, -7.2677e-02]],
[[ 4.8897e-02, 3.5206e-02, -2.2569e-02],
[-7.3389e-02, -2.0189e-02, -2.3356e-04],
[-1.0937e-01, -1.2326e-01, -7.6498e-02]],
[[ 3.1682e-02, -7.7843e-02, 5.8754e-02],
[ 1.1889e-02, -8.7783e-02, 4.3633e-02],
[-1.0679e-02, -1.2787e-01, -4.5413e-03]],
...,
[[-1.0361e-01, -8.9984e-02, -1.1379e-01],
[-6.7319e-02, -1.3771e-01, 4.3470e-02],
[-3.1459e-02, -9.2337e-02, -3.2716e-02]],
[[ 3.1741e-02, -1.7069e-01, -1.0351e-02],
[-9.7888e-02, -1.6123e-01, -5.4630e-02],
[-6.8916e-02, -4.2834e-02, -1.3699e-03]],
[[-1.6180e-01, -2.0972e-01, -1.7769e-01],
[-1.6040e-01, -9.6406e-02, -2.0082e-01],
[-2.8054e-02, -1.1689e-01, -7.3856e-02]]],
[[[-4.5792e-02, -1.2043e-01, -1.6389e-01],
[-6.5307e-02, -2.0010e-01, -2.1425e-01],
[ 1.1165e-01, -1.7018e-01, -2.8097e-01]],
[[ 2.4502e-03, -1.4617e-01, -2.2199e-01],
[ 2.1944e-02, -2.3608e-02, -3.1099e-01],
[-7.3797e-02, -7.6290e-02, -3.2340e-01]],
[[-8.7203e-02, -1.2117e-01, -2.7879e-01],
[ 6.6053e-02, -5.3871e-02, -4.0045e-01],
[-3.7976e-02, -1.6449e-01, -9.8071e-02]],
...,
[[ 1.0005e-02, 8.5025e-02, 1.5236e-01],
[-2.4293e-02, 6.1165e-02, -5.4291e-02],
[-1.2552e-01, 6.2566e-02, 5.9047e-02]],
[[-6.6393e-02, 5.6195e-02, 1.1308e-01],
[ 2.5783e-02, 4.6010e-02, 9.1680e-02],
[ 6.9930e-02, -2.5407e-02, 5.5675e-02]],
[[-1.4944e-02, -1.0409e-01, 5.6999e-03],
[ 4.7882e-03, -1.0743e-01, -6.8495e-02],
[ 2.3618e-01, 9.2656e-02, 4.5788e-02]]],
...,
[[[-1.3759e-01, 2.7049e-03, -7.9909e-02],
[ 2.8031e-02, 1.4453e-02, -5.5996e-02],
[ 1.4998e-01, -1.1679e-02, -1.4608e-01]],
[[ 2.1672e-01, -5.3861e-02, 7.3709e-02],
[ 8.4929e-02, 4.3445e-02, 5.9796e-02],
[-2.4162e-02, 3.0742e-02, -6.8899e-02]],
[[ 3.8625e-02, -7.5427e-02, 5.2740e-02],
[ 9.2682e-02, 1.1766e-01, 9.2864e-02],
[-1.3879e-01, -1.4105e-02, -1.3122e-03]],
...,
[[ 7.9566e-02, 1.3083e-01, 8.1429e-02],
[-5.5141e-02, 8.9694e-02, -1.7687e-02],
[-1.9234e-01, -5.5994e-02, 1.0461e-01]],
[[ 9.7219e-03, 9.6929e-02, 9.2753e-02],
[ 9.4866e-02, 1.4751e-02, 7.3923e-02],
[-4.1690e-02, 1.0374e-01, 1.0237e-01]],
[[-2.2556e-01, -2.7430e-01, -1.9694e-01],
[-1.9588e-02, -2.9057e-02, 7.7500e-03],
[-2.5862e-01, -2.8358e-01, -2.4481e-01]]],
[[[-9.1657e-02, 1.0245e-01, -1.7187e-01],
[ 1.8031e-01, 1.3032e-01, 2.4875e-02],
[ 6.3358e-02, 3.0167e-02, -5.3758e-02]],
[[-8.2867e-02, -9.7574e-02, -5.8163e-02],
[ 1.2998e-02, 8.9369e-02, 8.0605e-02],
[-2.8113e-02, -9.6123e-04, 4.2219e-02]],
[[-4.4274e-02, 2.3184e-02, 1.1917e-01],
[-3.0130e-02, 3.5930e-02, 1.2892e-01],
[-3.0690e-02, 1.8464e-01, 2.1645e-02]],
...,
[[-1.5694e-01, -2.2109e-02, -9.1290e-02],
[-6.3509e-02, -8.5809e-03, 5.0231e-02],
[ 5.1115e-03, 1.4978e-01, 6.6539e-02]],
[[-1.9574e-01, -9.3081e-02, -6.5208e-02],
[-9.4363e-02, -1.1701e-01, -9.9407e-02],
[-4.9267e-02, 3.5482e-02, -5.4154e-02]],
[[-2.5461e-02, -5.8637e-02, -1.4412e-02],
[ 3.6325e-03, -8.6298e-02, 1.0870e-03],
[-1.8515e-01, -3.1757e-01, -2.8373e-01]]],
[[[ 2.2940e-02, -6.2830e-02, -4.2061e-03],
[ 3.1065e-02, 3.7827e-02, 1.1839e-02],
[-4.3153e-02, 9.1716e-03, -8.6439e-02]],
[[-7.6829e-02, -1.1800e-01, -8.6781e-02],
[-1.3080e-01, -8.9921e-02, 5.2576e-02],
[-1.4488e-02, -7.9454e-02, -9.9334e-02]],
[[ 4.4036e-03, -9.5831e-02, -2.7507e-02],
[-1.6174e-02, 4.3948e-02, -8.0171e-02],
[-9.6209e-02, -6.9976e-02, 6.6358e-02]],
...,
[[-6.4439e-02, 3.2324e-02, -8.7478e-02],
[ 7.6152e-03, -1.1078e-01, -4.3143e-02],
[-1.1573e-02, -1.8276e-02, -5.7322e-02]],
[[-3.4510e-02, 2.9707e-03, -5.2494e-02],
[-6.6018e-03, 9.1846e-03, -5.0195e-03],
[ 3.1090e-02, -4.4592e-02, -9.0475e-02]],
[[-1.1002e-01, -8.3339e-03, 8.3251e-03],
[-9.3409e-03, -1.9215e-02, -7.2640e-02],
[-9.2249e-03, -1.5913e-01, -9.9546e-02]]]], device='cuda:0')),
('conv_layers.5.bias',
tensor([-0.0090, 0.0435, 0.1143, -0.0095, 0.0507, 0.0721, 0.0410, 0.0590,
0.1165, -0.0613, 0.0569, 0.1223, 0.1489, -0.0795, -0.0971, 0.1147,
0.1002, 0.0299, -0.1699, -0.0246, 0.0696, 0.1806, 0.0938, 0.1576,
-0.0968, 0.2120, 0.0474, 0.0023, 0.0906, 0.2273, -0.0323, -0.1231],
device='cuda:0')),
('conv_layers.7.weight',
tensor([[[[ 3.5559e-02, 3.5274e-02, -3.2634e-02],
[-3.6338e-02, 5.6116e-02, 7.3157e-03],
[ 5.2426e-02, 1.5922e-02, -2.4231e-02]],
[[ 2.2664e-02, 6.3653e-02, 5.7258e-02],
[ 1.1843e-03, -7.9600e-04, 2.3037e-02],
[-2.4494e-02, -4.0367e-02, 6.1424e-02]],
[[ 1.2277e-01, 9.9143e-03, -3.4152e-02],
[ 6.7528e-02, -6.5421e-02, -4.3681e-04],
[ 6.2874e-02, -5.7175e-02, -1.2814e-01]],
...,
[[-6.7781e-02, -8.2425e-02, -1.0110e-01],
[ 3.2471e-02, 4.9171e-03, -9.3354e-02],
[ 7.5355e-02, -5.3194e-02, -7.9378e-02]],
[[-3.4784e-02, -1.4300e-01, -5.6395e-02],
[-4.1736e-02, -6.3961e-02, -2.1281e-02],
[-1.5588e-03, -1.3330e-01, -4.3805e-02]],
[[-4.2550e-02, 4.7223e-03, -3.2727e-02],
[-2.3224e-02, -4.7982e-02, 1.7669e-02],
[-1.0733e-02, 4.8844e-02, 5.3792e-02]]],
[[[ 2.3280e-02, 7.6604e-02, -4.6965e-02],
[ 1.4205e-02, -1.8863e-02, 4.1874e-02],
[ 2.8221e-02, -4.6119e-02, -3.7712e-02]],
[[ 5.2517e-02, -7.0628e-03, -5.9523e-03],
[-1.4974e-02, -4.7057e-02, 3.0729e-02],
[-2.0375e-02, 5.4302e-02, 7.6858e-03]],
[[-4.1055e-02, -2.5339e-01, 1.6832e-02],
[-1.5282e-01, -1.7464e-01, 1.2827e-01],
[-2.0308e-01, 5.4524e-02, 2.2803e-01]],
...,
[[-3.7617e-02, -3.5474e-03, 2.1929e-01],
[ 8.7137e-02, 7.4001e-02, 2.8403e-01],
[-3.3501e-02, 7.5634e-02, 1.4651e-01]],
[[-9.6800e-02, -3.3176e-02, 1.1719e-03],
[-8.7004e-02, -1.3764e-02, -3.2047e-03],
[-1.0424e-01, 4.7754e-02, -1.6635e-02]],
[[ 1.7195e-03, -5.7114e-02, -4.2013e-02],
[ 8.7990e-03, -2.8726e-02, 5.7687e-02],
[ 1.5425e-04, -2.3975e-02, 1.8636e-02]]],
[[[-3.6848e-02, 8.1031e-03, -1.3952e-02],
[-1.7796e-02, -2.4654e-02, -3.6697e-02],
[ 1.3928e-03, 3.7718e-02, 1.2068e-02]],
[[-5.6103e-02, 5.7476e-03, -6.7678e-02],
[ 3.6230e-02, -4.3355e-02, 4.2984e-02],
[ 4.1516e-02, 1.9202e-02, -1.1985e-03]],
[[ 7.1082e-02, 6.0453e-02, 1.6574e-02],
[-1.2947e-02, 3.5662e-02, 1.3128e-01],
[-1.8604e-01, -8.1713e-02, 1.6090e-01]],
...,
[[-2.0232e-01, 4.7026e-02, -5.2175e-02],
[-4.6559e-01, -2.7726e-01, -3.5265e-01],
[-1.4323e-01, 4.7101e-02, -1.0125e-01]],
[[ 6.7277e-03, -6.5162e-02, -1.3727e-01],
[-9.0810e-03, -2.1001e-02, -1.9667e-04],
[ 6.0346e-02, 1.0353e-01, 1.2385e-01]],
[[-2.5196e-02, 6.8853e-02, 9.0692e-02],
[-2.9065e-02, -2.7825e-02, -5.4925e-02],
[-4.2684e-02, 4.0033e-02, -5.7118e-02]]],
...,
[[[ 5.0872e-02, 4.6986e-02, -1.3631e-02],
[-2.9946e-02, -6.2462e-03, 5.1554e-02],
[-2.7095e-02, 3.9340e-02, 4.6724e-02]],
[[ 3.0069e-02, -4.7281e-02, 1.1609e-02],
[-9.2982e-03, 4.1813e-02, 5.0424e-02],
[-1.7831e-02, -3.3969e-02, 5.3147e-02]],
[[ 9.5100e-03, -3.1215e-02, -2.4226e-02],
[ 2.4513e-02, -7.5835e-02, -7.3438e-02],
[-1.2918e-01, -2.3356e-01, -1.4852e-01]],
...,
[[ 6.5318e-02, -1.1108e-01, -4.7450e-02],
[ 6.1423e-02, -7.6041e-02, -1.1609e-01],
[-2.7467e-02, -1.4643e-01, -7.6933e-02]],
[[ 1.6920e-02, 1.0872e-01, 5.7432e-02],
[-4.0607e-03, -9.5134e-02, 2.1578e-04],
[-4.2530e-02, -4.5765e-02, -7.7042e-02]],
[[ 4.1738e-04, -4.1330e-02, -3.9871e-02],
[ 4.7424e-02, 5.5882e-02, 4.7602e-02],
[-8.2118e-03, -4.8224e-03, 4.4901e-04]]],
[[[ 9.8375e-03, -3.6521e-02, -2.3219e-02],
[-4.9639e-02, 3.2556e-02, -1.3188e-02],
[-7.9628e-02, -6.2007e-02, -1.0419e-01]],
[[ 4.0558e-02, -9.9426e-03, -5.0132e-03],
[ 4.5132e-03, 3.2323e-02, 6.2585e-03],
[ 3.8125e-02, -1.0458e-04, -3.6314e-02]],
[[ 4.7150e-02, 4.4216e-02, 1.1542e-01],
[ 1.0945e-01, 6.0106e-02, 8.8901e-02],
[-1.5126e-02, 1.6408e-02, 2.6989e-02]],
...,
[[-1.5489e-01, -9.1927e-04, -3.9162e-02],
[-1.2884e-01, 2.4859e-03, 6.1428e-02],
[-3.5108e-01, -1.0877e-01, -4.0350e-02]],
[[ 2.6814e-02, 8.0993e-02, -2.2237e-02],
[-1.5351e-01, 5.0270e-02, 4.0902e-02],
[-1.0787e-01, 3.0855e-02, 1.0168e-01]],
[[ 3.1024e-02, 4.5026e-02, -3.3429e-02],
[ 4.0177e-02, 2.5185e-02, 3.1419e-02],
[ 1.6468e-02, 6.5401e-02, 3.9235e-02]]],
[[[ 4.5067e-03, 2.9995e-02, 1.1123e-02],
[-4.0659e-02, -2.5555e-02, -4.1949e-02],
[-1.7414e-02, 3.0913e-02, -4.5757e-02]],
[[ 3.0342e-02, 1.4000e-02, -2.0311e-02],
[-3.6799e-02, -5.0068e-02, -4.3416e-02],
[ 3.4380e-02, -4.2739e-02, 2.8302e-02]],
[[-2.0808e-02, -3.3298e-02, 1.6003e-02],
[-5.4683e-02, -3.4360e-02, -5.3534e-02],
[ 5.5169e-02, 7.4382e-04, 2.2630e-02]],
...,
[[-4.6237e-02, -1.3282e-02, -1.0170e-01],
[-7.2085e-02, -1.3459e-01, -5.9193e-02],
[-5.1387e-02, -3.6412e-02, -5.1780e-02]],
[[-3.8854e-02, -4.4536e-02, -9.2345e-02],
[-3.3871e-02, -1.2102e-01, -1.3097e-02],
[-1.1332e-01, -9.4372e-04, 4.4815e-02]],
[[-5.5748e-02, 4.3091e-02, 9.1060e-03],
[-6.3378e-02, -6.1069e-02, 3.2368e-03],
[-7.2674e-03, 1.4812e-04, -3.3107e-02]]]], device='cuda:0')),
('conv_layers.7.bias',
tensor([-0.1201, 0.0729, 0.0884, 0.0204, -0.1527, 0.0565, -0.0851, -0.1046,
-0.1227, -0.0861, -0.1027, 0.0850, 0.0425, 0.0735, -0.2827, 0.0560,
-0.1780, 0.0310, -0.0449, -0.1227, -0.1464, 0.0064, -0.0340, -0.0367,
0.0038, 0.0703, 0.0695, 0.0288, -0.0470, -0.1361, -0.2126, -0.1131],
device='cuda:0')),
('conv_layers.10.weight',
tensor([[[[-1.4974e-02, 1.6159e-02, -2.1692e-02],
[-1.1054e-01, 1.0090e-01, 6.5374e-02],
[ 2.5876e-02, 8.9258e-02, 1.6851e-02]],
[[-8.8154e-02, -5.6921e-02, 8.3405e-02],
[-2.0680e-02, 1.4311e-03, 3.6550e-02],
[-6.2781e-02, -1.9149e-02, -3.8835e-02]],
[[ 4.5904e-02, -1.3570e-01, -6.0750e-03],
[-1.9463e-01, -2.2786e-01, 6.9103e-02],
[-1.4038e-01, -2.0523e-01, 3.7409e-03]],
...,
[[ 1.3204e-01, 7.4625e-02, 1.2842e-01],
[ 4.6032e-02, 8.2025e-02, 1.6194e-01],
[ 2.1155e-02, -3.2194e-03, 9.9827e-02]],
[[-1.4951e-01, 4.7424e-02, 1.5208e-01],
[-1.3172e-01, -1.2731e-02, -8.2174e-03],
[-3.0113e-02, 3.2342e-02, 3.3055e-03]],
[[ 4.3621e-03, -6.9220e-02, 3.6431e-02],
[ 4.1395e-02, -6.3576e-02, 1.0546e-01],
[-1.7954e-02, 3.4716e-02, 3.0719e-04]]],
[[[-1.0603e-04, -6.6601e-02, 5.8396e-02],
[ 8.1521e-02, -2.0973e-03, -1.2311e-03],
[-1.4602e-02, 3.0342e-02, 1.9852e-02]],
[[-5.2913e-02, 2.6487e-02, 1.0752e-02],
[ 1.1287e-01, 1.4226e-01, 9.8487e-02],
[ 7.5047e-02, 1.4104e-01, -6.1884e-03]],
[[ 1.2576e-01, 5.9752e-02, 1.5918e-01],
[-1.2397e-01, -1.7159e-01, -9.1228e-02],
[ 1.2227e-01, -2.0917e-02, -9.7001e-02]],
...,
[[ 1.1487e-01, 2.5749e-02, -8.4589e-02],
[-7.5378e-03, -2.8421e-02, -6.8609e-02],
[ 1.8780e-02, -4.1813e-02, -5.4226e-02]],
[[-3.2611e-02, -7.2332e-02, -5.8439e-02],
[ 7.5412e-02, 4.0328e-03, -1.4659e-01],
[ 1.5877e-01, 8.9977e-02, -2.1598e-01]],
[[-3.0953e-02, 1.0444e-02, 1.9809e-02],
[ 9.7567e-03, -4.7959e-02, 4.8715e-02],
[ 2.2698e-02, 3.4452e-02, -3.2624e-02]]],
[[[-2.1878e-02, -9.3774e-02, -1.0309e-01],
[ 1.3793e-01, 2.4065e-02, 2.4753e-03],
[ 8.9010e-02, 2.2375e-02, -6.9035e-02]],
[[-5.6316e-02, 4.7012e-02, -1.9392e-02],
[-8.8560e-02, -3.7857e-02, -6.5616e-02],
[-6.4065e-02, -1.0625e-02, -9.5144e-02]],
[[-9.9342e-02, -1.6114e-01, -2.7639e-02],
[-8.2803e-02, -1.0070e-01, 3.1821e-02],
[ 5.2095e-02, -1.0721e-01, 3.1739e-03]],
...,
[[-2.1487e-02, 2.9866e-02, 6.1278e-02],
[ 6.9886e-02, -4.0941e-02, 1.0795e-01],
[ 4.9959e-02, 1.1619e-01, -3.7782e-02]],
[[-2.2585e-01, -1.7626e-01, -2.9146e-03],
[-2.3104e-01, -6.6904e-02, -6.6845e-02],
[-2.4099e-01, -7.9249e-02, -7.6774e-02]],
[[ 1.8142e-02, -7.2950e-03, -1.3692e-02],
[ 3.3579e-02, 5.6316e-02, 1.8108e-02],
[ 1.9030e-02, -5.1642e-02, 5.4077e-02]]],
...,
[[[-5.3189e-02, 1.8239e-02, 2.1896e-02],
[ 1.2110e-01, 4.9274e-02, 4.3501e-02],
[ 5.7318e-02, 1.2617e-01, 1.0614e-01]],
[[ 3.1533e-03, 4.6356e-03, 9.0294e-02],
[ 7.7716e-02, 1.0116e-01, 1.3330e-01],
[ 2.1456e-02, 5.0572e-02, -5.3970e-02]],
[[ 1.5189e-01, 1.2349e-01, 6.2511e-02],
[-1.3895e-02, -4.1350e-02, 1.5350e-01],
[-2.5537e-02, 7.3825e-02, 1.2738e-01]],
...,
[[ 7.1021e-02, 4.0699e-03, -1.5127e-02],
[ 4.2715e-02, 2.5351e-02, -6.8456e-02],
[ 2.4252e-02, -1.0033e-02, -3.2340e-02]],
[[-1.2776e-03, 3.2009e-02, 2.1810e-02],
[-9.0066e-02, -1.2076e-02, 4.5009e-02],
[ 1.7046e-02, 1.5869e-01, 2.6431e-02]],
[[-2.2267e-02, -1.3627e-01, 2.5387e-02],
[ 4.4897e-02, -9.7506e-02, -1.4287e-01],
[-6.0345e-02, -2.8279e-02, 1.0743e-03]]],
[[[-1.2966e-02, -1.0234e-01, 5.7226e-02],
[-5.1346e-02, -1.2057e-02, -6.3904e-02],
[-6.5721e-02, -1.2616e-01, 8.2800e-02]],
[[-3.0592e-02, -1.0024e-01, -1.4793e-02],
[-1.9146e-02, 4.5828e-02, -1.9429e-02],
[ 8.0257e-02, 1.1782e-01, -3.0161e-02]],
[[-5.5247e-04, -6.4090e-03, 1.0119e-01],
[-1.3298e-01, 6.9116e-02, -5.2279e-02],
[-5.8447e-02, -7.3476e-03, 3.1630e-01]],
...,
[[-9.3010e-02, 7.9068e-02, -2.2535e-02],
[-8.9478e-02, -4.8209e-02, 6.1327e-03],
[-1.0712e-01, -1.0757e-01, 9.8139e-03]],
[[ 9.0832e-03, -1.2409e-01, 3.2423e-02],
[-3.9535e-03, -4.0250e-02, -6.6551e-02],
[ 6.8397e-03, 9.1716e-03, 6.7222e-02]],
[[-8.2337e-03, -1.6657e-02, 1.2545e-01],
[-1.0118e-01, 3.1294e-02, 1.5574e-01],
[-1.0487e-02, -6.8650e-03, 8.5572e-02]]],
[[[-4.3698e-02, 4.6679e-02, -2.6565e-02],
[-2.1501e-03, -2.6999e-02, -4.4960e-02],
[ 4.1027e-02, -3.6543e-02, 1.0589e-02]],
[[-1.8102e-02, -5.5023e-02, -5.3675e-02],
[-2.1185e-02, -1.9392e-02, 3.7565e-02],
[-2.5724e-02, -8.8883e-03, -5.9184e-03]],
[[-1.4779e-02, -7.6012e-02, 3.3720e-02],
[ 1.1209e-02, -9.6086e-03, 6.7264e-03],
[-1.7801e-02, -3.3564e-02, -5.8955e-03]],
...,
[[-1.2809e-02, -5.5577e-02, 4.5194e-02],
[-3.7489e-02, 7.6621e-03, -2.0257e-02],
[ 5.3113e-02, -1.2500e-02, -2.8014e-02]],
[[-4.3228e-03, -5.0019e-02, -4.1148e-02],
[-4.1540e-02, -4.5005e-02, -2.0184e-02],
[-4.9982e-02, -2.6052e-02, 1.0077e-02]],
[[-3.0751e-02, -4.9143e-02, 4.7179e-02],
[ 5.1609e-02, -2.7043e-02, -4.2536e-02],
[-3.9209e-02, 6.0066e-03, 1.9457e-03]]]], device='cuda:0')),
('conv_layers.10.bias',
tensor([ 0.1022, -0.0473, 0.0672, -0.1241, -0.0325, -0.2341, -0.0648, 0.0962,
0.2217, 0.1109, -0.0378, 0.0438, 0.1387, -0.0746, 0.1567, -0.0807,
-0.1354, 0.0537, 0.0059, 0.1373, 0.2127, -0.2179, -0.0746, -0.1148,
0.1288, -0.0236, 0.0356, 0.2448, 0.0014, 0.0694, -0.1509, -0.1077],
device='cuda:0')),
('conv_layers.12.weight',
tensor([[[[ 7.7399e-02, -9.6949e-02, -2.7073e-01],
[ 2.3883e-02, -2.0506e-01, -8.5156e-02],
[ 1.7268e-01, 9.5144e-02, -2.5118e-02]],
[[-2.6565e-02, 7.2141e-02, -6.1081e-03],
[-2.6261e-02, 1.2622e-02, 4.8862e-02],
[ 9.7061e-02, -4.2215e-02, 8.0684e-03]],
[[ 5.5229e-02, 1.8127e-02, -2.5245e-02],
[ 1.4627e-01, 6.4376e-02, 5.6496e-02],
[-1.8184e-01, -1.0913e-02, 1.2816e-01]],
...,
[[ 2.8374e-02, 4.7780e-02, 7.5687e-02],
[-1.0902e-02, 7.6264e-02, 1.8673e-01],
[ 1.9496e-01, 9.3792e-03, -3.5714e-02]],
[[-1.7163e-01, -8.3406e-02, -9.4540e-02],
[ 3.7697e-02, -6.2919e-02, -6.9527e-02],
[ 1.5308e-03, 5.0065e-02, -1.5285e-01]],
[[ 3.6940e-02, -4.9373e-03, -7.6827e-02],
[-3.1859e-02, 1.6486e-02, 4.2789e-03],
[ 3.0798e-02, 3.3960e-02, -6.8066e-02]]],
[[[-2.7672e-02, 4.1359e-02, 1.3456e-01],
[ 3.5444e-02, -4.5565e-02, 7.1767e-02],
[ 6.5571e-02, -1.2423e-01, 3.3461e-02]],
[[ 7.2180e-02, -1.3789e-01, -2.1212e-01],
[ 1.0550e-01, -3.7983e-02, -6.2984e-03],
[ 1.7195e-01, -3.1983e-02, 3.0495e-02]],
[[-1.0200e-01, 1.4251e-01, -4.1532e-02],
[-1.9126e-01, 4.2599e-02, 5.7218e-02],
[-1.0718e-02, -6.3790e-02, 4.3510e-02]],
...,
[[-3.0051e-02, -4.7181e-02, 4.6318e-03],
[ 1.0634e-01, 5.8178e-02, 3.8082e-03],
[ 8.1873e-02, 7.9430e-02, 1.0053e-01]],
[[-1.0109e-01, 7.2745e-04, -1.3074e-01],
[-1.2903e-01, -1.4736e-01, -7.4363e-02],
[-3.7653e-02, 1.5599e-02, 7.7372e-02]],
[[-2.9931e-02, -1.1739e-02, 6.1795e-02],
[-3.6314e-04, 1.9943e-02, 3.9908e-03],
[-1.0793e-02, 5.7973e-02, 5.1538e-02]]],
[[[ 3.8306e-02, -9.8213e-02, -1.7241e-01],
[-1.6559e-01, -1.9584e-01, -6.2141e-02],
[-3.0662e-01, -1.7542e-01, 7.1864e-02]],
[[ 4.0030e-02, -1.7666e-01, -7.9838e-02],
[-1.5894e-01, -3.2016e-01, -2.6969e-01],
[-7.3881e-02, -3.0640e-02, -5.3187e-02]],
[[-4.0802e-02, -1.0738e-01, -3.3106e-02],
[-1.5753e-01, -2.4391e-01, -9.3499e-02],
[ 5.5755e-02, -4.9650e-02, 1.2502e-01]],
...,
[[-6.0072e-02, -1.5812e-01, -1.8761e-01],
[-5.1800e-02, -2.1693e-01, -1.9677e-02],
[ 5.0556e-02, 9.1450e-02, 1.5961e-01]],
[[ 4.2619e-03, 9.5477e-02, 7.7651e-02],
[-4.7559e-02, 1.0212e-01, -4.1138e-02],
[-1.8507e-01, -7.2547e-02, -4.2812e-02]],
[[ 2.4319e-02, 4.6071e-02, 1.6746e-02],
[ 2.6823e-02, 3.5240e-02, -4.0805e-02],
[-1.7033e-02, 2.3640e-02, -4.6770e-02]]],
...,
[[[-1.0673e-01, -1.2792e-01, 1.1733e-01],
[-4.0886e-02, 9.0394e-03, -7.2699e-02],
[ 8.0294e-02, 3.0975e-03, -1.0630e-01]],
[[-8.5543e-02, 6.4479e-02, 1.0187e-01],
[-1.0312e-01, -2.6099e-03, -5.6255e-03],
[-1.5112e-01, -1.8001e-01, -1.1885e-01]],
[[ 4.6543e-02, -6.2894e-02, -2.4763e-01],
[ 2.1534e-02, 5.3513e-02, 2.6184e-02],
[ 1.1577e-01, 4.0263e-02, -5.8962e-03]],
...,
[[ 8.3211e-02, 7.4536e-02, 5.0046e-02],
[ 5.9515e-02, -6.8235e-02, 5.9160e-02],
[-1.0733e-01, -7.9521e-02, -6.1005e-02]],
[[-1.7170e-02, 1.0127e-01, 7.6722e-03],
[ 8.2933e-02, 3.0542e-02, 3.3276e-02],
[ 4.8455e-02, -7.8862e-02, 1.0048e-01]],
[[ 3.0983e-02, 4.8264e-02, 3.3730e-02],
[ 1.3927e-02, 1.9908e-02, 3.8934e-02],
[-3.1516e-02, -2.5438e-02, -7.0652e-02]]],
[[[-1.0551e-01, -9.3720e-02, -4.6565e-02],
[ 2.9706e-02, 3.9978e-02, -8.2824e-02],
[ 1.8950e-02, -1.3601e-01, -3.2186e-01]],
[[-6.7598e-02, -8.3918e-03, -6.5337e-03],
[-6.5989e-02, -8.0916e-02, -9.8359e-02],
[-1.0008e-01, -7.3763e-02, -1.2175e-01]],
[[-2.1229e-01, 4.9892e-02, 5.0501e-02],
[-1.0274e-01, -1.4310e-01, -1.5288e-01],
[-1.4382e-01, -1.9195e-01, 7.7643e-03]],
...,
[[ 3.3352e-02, -4.3882e-02, -2.6715e-02],
[-3.0410e-02, -1.0542e-01, -1.9721e-01],
[ 2.7308e-02, -5.0915e-02, -8.7361e-02]],
[[ 6.3617e-02, 6.8998e-02, 1.1524e-01],
[ 3.1577e-02, -8.2297e-02, -4.1428e-02],
[ 8.7019e-03, -1.6705e-01, -3.6697e-02]],
[[-5.4677e-02, -1.8781e-02, -3.7745e-03],
[-6.3388e-02, 7.7077e-03, -3.0583e-02],
[-6.8861e-02, -4.9895e-02, 4.4517e-02]]],
[[[-2.1107e-01, -2.0541e-01, -5.7970e-02],
[-2.8435e-03, -3.6323e-02, -7.8163e-02],
[ 7.4566e-02, -6.5206e-02, -2.2582e-01]],
[[-1.6473e-01, 5.7136e-02, 1.0264e-01],
[ 5.2175e-02, -8.5533e-02, -7.9778e-02],
[-3.6050e-02, -3.7641e-01, -2.1087e-01]],
[[ 1.4119e-01, 1.0987e-02, -9.5780e-02],
[ 1.7285e-01, 4.2861e-02, -7.8329e-02],
[ 1.1475e-01, 1.1943e-01, -1.2878e-01]],
...,
[[-2.6446e-01, -1.5293e-01, -2.1115e-02],
[-3.9838e-02, -2.4160e-01, -3.1906e-02],
[ 8.1045e-02, -3.9095e-02, -1.3858e-01]],
[[ 5.2117e-02, -1.1244e-01, -1.2897e-02],
[ 7.1000e-02, -2.3224e-02, -2.8387e-01],
[ 9.4481e-02, -1.0296e-01, -1.6276e-01]],
[[-5.5738e-02, -7.9504e-03, -3.1737e-02],
[-4.4244e-03, -3.1451e-02, -1.9995e-02],
[ 9.0058e-03, 3.1966e-02, -5.5357e-02]]]], device='cuda:0')),
('conv_layers.12.bias',
tensor([ 0.0865, -0.0350, 0.0031, -0.0796, 0.1037, 0.1699, 0.1798, 0.0644,
-0.0688, 0.0581, 0.0025, -0.0429, 0.1128, 0.0399, -0.0521, -0.0590,
-0.0387, 0.1770, -0.0931, -0.0709, -0.0489, 0.1220, -0.0023, 0.1366,
0.0476, -0.0629, -0.1027, -0.0310, 0.1637, 0.0307, -0.1593, -0.0791],
device='cuda:0')),
('fc_layers.0.weight',
tensor([[-0.0044, -0.1157, -0.0458, ..., -0.0528, -0.0571, -0.0700],
[-0.0373, -0.0494, -0.0567, ..., 0.0021, -0.0292, -0.0083],
[ 0.0149, 0.1563, 0.0135, ..., -0.1773, -0.1033, -0.0262],
...,
[-0.0194, -0.0296, -0.0644, ..., -0.0653, -0.0521, -0.0539],
[ 0.0567, 0.0332, 0.0084, ..., -0.0005, -0.0791, -0.0806],
[ 0.0044, -0.0257, -0.0039, ..., -0.0955, -0.0786, 0.0037]],
device='cuda:0')),
('fc_layers.0.bias',
tensor([-2.1525e-02, 2.4826e-02, 9.4047e-02, -4.7163e-02, 6.4372e-02,
5.2006e-02, -5.2554e-02, 2.1731e-01, -1.7082e-02, -3.9128e-02,
-1.9554e-04, 1.6688e-01, 5.9895e-02, 3.1593e-02, -4.4526e-02,
-8.0763e-02, -2.9223e-02, 7.8286e-02, -6.5479e-04, -8.6864e-02,
2.7289e-02, -4.7260e-02, 4.6764e-02, 3.4324e-02, -3.2748e-02,
-5.7316e-02, 1.0255e-01, 1.5001e-01, 2.0493e-02, 2.6225e-02,
9.7465e-02, 3.7038e-03, -2.1210e-02, -4.4652e-02, -4.3157e-02,
5.2283e-02, 1.9521e-02, 1.5320e-02, -3.2876e-03, 1.5703e-01,
-1.7891e-02, 1.7069e-02, -4.6872e-02, 1.0920e-02, -2.2992e-02,
-7.0291e-02, -7.9841e-02, 2.2088e-02, 9.9825e-03, 7.8665e-02,
5.2278e-02, -8.4471e-03, -1.0461e-02, -6.1443e-02, -4.0272e-02,
7.1016e-02, -2.4105e-02, 1.3946e-01, 1.3623e-01, 8.1351e-02,
-2.4316e-02, 1.6502e-02, -4.2957e-02, 2.2715e-02, 5.5536e-02,
-5.1628e-02, -5.2875e-02, -5.8817e-02, -1.6936e-02, -1.7609e-02,
1.8061e-01, 5.0295e-02, -3.5254e-03, 1.1319e-01, -2.0799e-02,
-3.8267e-03, 9.0390e-03, 7.2155e-03, 9.3028e-02, 9.7563e-02,
2.1563e-01, -1.6979e-02, -1.1607e-03, -1.3282e-02, 4.9498e-02,
4.0107e-02, 7.6114e-02, 1.6674e-01, 1.3116e-01, -2.1768e-02,
3.2032e-02, 1.8335e-01, -8.1363e-02, -3.9042e-02, -9.4826e-03,
-7.3121e-02, -2.3571e-03, -2.2120e-02, -4.3624e-02, -7.3387e-02,
1.0340e-01, 1.8853e-02, 1.9281e-01, 1.4484e-01, -7.2334e-02,
1.5703e-01, -2.7804e-02, 1.2288e-02, -2.2190e-02, -5.4153e-02,
1.3588e-01, -5.8621e-02, 9.8644e-02, 7.1918e-02, 4.1401e-02,
1.5571e-01, -8.2656e-02, 3.1672e-02, -7.6764e-02, 3.3219e-02,
-4.2180e-02, -2.2879e-02, 1.3282e-02, 1.4846e-01, 6.6418e-02,
6.0300e-02, 1.7182e-03, 5.9438e-02, -4.6418e-02, 1.2317e-03,
-1.1756e-02, -5.2129e-02, 2.2119e-02, 9.9196e-03, 9.9879e-02,
-3.6976e-02, -2.7934e-02, -5.1991e-02, 2.3464e-01, -4.8674e-02,
2.1497e-01, -2.9932e-02, 2.8322e-02, 1.2611e-01, 1.3494e-01,
-3.7552e-02, -8.6478e-02, -3.2640e-02, -4.8975e-02, -5.8636e-02,
-4.2363e-02, -8.4145e-03, 4.9538e-03, 4.8106e-03, 1.5043e-02,
-2.6803e-02, 8.8823e-03, -3.2457e-02, -6.5314e-02, 5.6729e-03,
6.5448e-03, -1.9005e-02, -1.0866e-02, 4.5358e-02, 1.0465e-02,
-3.6849e-03, 1.1857e-01, 2.2842e-02, -1.9492e-03, -6.7853e-02,
2.6386e-02, 5.0207e-02, -5.3882e-02, 2.8240e-01, -1.9416e-02,
2.0183e-03, 9.6777e-02, -9.2690e-02, 4.6893e-02, 5.8579e-02,
1.8185e-03, 1.3131e-01, -3.9590e-02, 7.1585e-02, -1.2077e-01,
-4.3498e-02, -1.0474e-01, -1.9953e-02, -8.1553e-03, 5.6119e-02,
2.6391e-02, 7.0841e-02, -6.6561e-02, 1.6734e-01, -8.1174e-02,
7.8208e-02, -7.9344e-02, -5.7535e-02, 2.2667e-02, 1.3515e-01,
-4.5446e-02, -5.8292e-02, -1.0738e-01, 2.8606e-02, 6.3153e-02,
-5.8171e-02, 1.0821e-01, -4.7517e-03, 2.2678e-01, 2.4434e-02,
-5.6863e-02, 1.5871e-02, -2.6147e-02, 6.5118e-02, 1.5772e-02,
-3.1208e-02, -7.9500e-03, 5.5096e-03, 5.0950e-02, -3.6232e-02,
-3.5966e-02, -1.9166e-03, 1.3935e-01, 8.1538e-02, -2.1646e-02,
1.0822e-01, 1.1515e-01, -3.8459e-02, 5.4804e-02, 1.5928e-02,
-7.7124e-02, -4.8065e-02, -4.8323e-02, 7.2910e-02, -2.5591e-02,
-1.4420e-02, -3.2400e-02, -1.6336e-01, 4.9055e-02, 4.1763e-03,
7.9569e-02, -6.0104e-02, 6.4422e-03, -3.7846e-02, -6.8974e-02,
-3.8254e-02, 1.3848e-01, 7.8892e-02, -5.1627e-02, 2.7535e-01,
-7.4273e-02, 5.4435e-02, -6.7893e-02, 2.7516e-02, -2.9696e-03,
1.7141e-02], device='cuda:0')),
('fc_layers.2.weight',
tensor([[ 0.0485, -0.0038, -0.1200, ..., -0.0251, -0.0864, 0.0407],
[-0.0317, -0.0226, -0.1060, ..., -0.0370, -0.1211, -0.0345],
[ 0.0019, 0.0220, -0.0207, ..., -0.0555, -0.0422, 0.0332],
...,
[-0.0507, 0.0152, 0.0037, ..., 0.0450, -0.1385, -0.0238],
[ 0.0024, -0.0236, 0.0860, ..., -0.0301, -0.0544, -0.0060],
[-0.0014, -0.0153, 0.0154, ..., -0.0226, -0.1131, -0.0364]],
device='cuda:0')),
('fc_layers.2.bias',
tensor([-0.1535, -0.0384, 0.1796, 0.0553, 0.1263, -0.0220, -0.0360, -0.1237,
-0.0598, -0.1414], device='cuda:0'))])},
{'ratio': 0.58,
'bias': 64,
'train_losses': [286.219930032577,
263.8363492235047,
212.09496666831703,
187.4530771094883,
171.51186335898194,
162.7032038073265,
157.40945342281935,
153.40648934978466,
149.16310287312479,
147.2651434287975,
144.6846261453046,
143.86420903457605,
141.65527468638894,
141.61568910580448,
139.24048380847586],
'test_losses': [283.80153096890916,
229.44990303002152,
193.3061567661809,
180.29703989215926,
163.64417144364,
157.5588331783519,
151.90740606831568,
150.91824606353163,
143.22210745717965,
143.63024104342742,
144.4630786110373,
138.1613200926313,
139.68362012563966,
135.69813620810416,
134.21858276105394],
'model_state_dict': OrderedDict([('conv_layers.0.weight',
tensor([[[[-6.0450e-02, -2.2510e-02, 1.5873e-01],
[-2.1995e-01, -1.8779e-01, 2.8978e-01],
[-2.6385e-01, 1.1759e-01, 1.6553e-01]],
[[-4.4610e-02, -1.9729e-01, 2.2418e-01],
[-2.5088e-01, -5.9383e-02, 7.4031e-02],
[-1.1473e-02, 1.0730e-01, 3.6939e-02]],
[[-1.5115e-02, -1.0381e-01, 2.3167e-01],
[-1.3122e-01, 1.7066e-01, -5.2357e-02],
[-3.4922e-02, -8.8694e-02, 8.5001e-02]]],
[[[ 5.7497e-02, 4.7326e-02, 6.9923e-02],
[ 1.5306e-02, -1.4440e-01, 2.2768e-01],
[ 8.1410e-03, -3.1888e-02, -3.7676e-02]],
[[ 2.3947e-03, -3.2122e-02, 2.0606e-01],
[ 3.2555e-01, -7.5292e-03, -2.3425e-01],
[ 1.0612e-01, -1.6826e-01, -1.4881e-01]],
[[-2.8664e-02, -1.9303e-02, 3.2011e-02],
[ 9.8808e-02, -6.6375e-02, -1.9415e-01],
[ 1.4338e-01, -1.3101e-01, -1.5854e-01]]],
[[[ 1.5023e-01, 9.8933e-02, -2.4647e-01],
[ 1.4604e-01, 3.6617e-02, -1.6349e-01],
[ 2.3038e-02, 9.7591e-02, -7.9761e-02]],
[[ 9.9040e-03, -6.0116e-02, -9.0177e-03],
[ 1.9849e-01, 3.2925e-02, -2.7642e-01],
[ 1.1198e-01, -2.0957e-01, -1.4035e-01]],
[[ 1.2396e-01, 1.2827e-02, 9.0200e-02],
[ 1.4463e-01, -5.5014e-03, -9.6892e-02],
[ 7.0848e-02, -2.0424e-01, 8.3934e-02]]],
[[[ 3.0150e-02, -4.5877e-02, -2.6463e-01],
[-2.9649e-02, 1.0824e-01, -1.9684e-01],
[ 1.4970e-01, 6.2428e-02, 2.0307e-01]],
[[-1.2826e-03, -3.4190e-01, -2.1913e-01],
[ 6.7421e-03, 1.3023e-01, 3.1340e-02],
[ 1.5131e-01, 3.8475e-02, 2.7358e-01]],
[[-1.1773e-01, -2.7839e-02, -1.2070e-01],
[ 8.6017e-02, -1.5645e-01, -1.2339e-01],
[-2.8621e-02, 1.5446e-01, 1.8427e-01]]],
[[[-2.2039e-01, 8.2093e-02, -2.6788e-02],
[ 2.0794e-01, 2.5019e-01, 7.8980e-02],
[-1.0094e-01, -1.2924e-01, -1.8548e-01]],
[[-6.9354e-02, 1.7561e-01, 8.7696e-02],
[-1.0302e-01, 1.7003e-01, 1.9043e-01],
[ 2.4802e-03, 4.1905e-02, -1.6792e-01]],
[[ 8.1264e-02, -1.5698e-01, 4.5275e-02],
[-1.6946e-01, 7.8592e-02, 1.1412e-01],
[-6.0019e-02, -1.5331e-01, 6.1336e-02]]],
[[[-1.3264e-01, -1.2484e-01, -2.6462e-01],
[ 3.1613e-03, 6.9041e-02, 3.3852e-02],
[ 2.1618e-01, 1.3163e-01, 9.3881e-02]],
[[-2.6879e-01, -1.7490e-01, -1.9104e-01],
[ 5.3153e-02, -3.3643e-02, -8.1384e-02],
[ 2.1570e-01, 2.2066e-01, 2.1523e-01]],
[[-1.7888e-01, -2.4683e-01, 6.9462e-02],
[-7.0979e-02, 1.5920e-01, 1.3582e-01],
[ 2.5615e-01, 1.2579e-02, -7.2248e-02]]],
[[[ 6.4068e-02, -3.3193e-02, -2.1399e-01],
[ 1.1818e-01, -1.3503e-01, 9.1080e-02],
[ 8.7268e-02, -1.8289e-02, 2.2383e-02]],
[[ 1.4839e-02, -1.8508e-04, -2.9682e-01],
[ 1.9803e-01, -5.4633e-03, 1.1307e-02],
[ 3.2225e-01, 1.1996e-01, -1.2702e-01]],
[[ 5.3558e-02, 9.7115e-02, -1.6299e-01],
[-3.4402e-03, -1.4222e-01, -2.1125e-01],
[ 7.6240e-02, -1.3059e-02, 1.1669e-01]]],
[[[ 1.4657e-01, -4.2740e-02, -1.2827e-01],
[ 9.1022e-02, 1.1225e-01, -9.7914e-02],
[-1.4029e-01, -2.7116e-01, -4.4282e-03]],
[[-2.5044e-01, -9.0044e-02, 1.0843e-01],
[ 5.8032e-02, 2.3653e-01, 1.7530e-01],
[ 6.4744e-02, 1.9900e-01, 1.6075e-01]],
[[-2.8413e-01, -5.6262e-02, -6.1661e-02],
[-1.6670e-01, 6.2807e-02, 2.0882e-01],
[-1.6598e-01, -7.1248e-03, 9.1303e-02]]]], device='cuda:0')),
('conv_layers.0.bias',
tensor([ 0.2041, -0.3842, 0.1396, -0.3276, 0.2928, 0.2418, 0.2519, -0.2430],
device='cuda:0')),
('conv_layers.2.weight',
tensor([[[[ 9.0753e-02, -8.6791e-02, -1.6830e-02],
[ 7.2008e-03, -4.9800e-02, -2.3483e-01],
[ 5.5454e-02, -3.9312e-02, -1.4137e-01]],
[[-2.5383e-02, 3.5748e-01, 2.0223e-02],
[-7.1728e-02, 1.1514e-01, -2.0332e-01],
[-1.5924e-01, -1.4130e-01, -4.3478e-01]],
[[-8.1248e-02, -1.4580e-01, 3.6918e-02],
[-5.8317e-02, -1.6287e-01, -1.1554e-01],
[ 4.0967e-02, 1.0158e-04, 5.0065e-02]],
...,
[[-1.6229e-02, 3.6081e-02, -2.0604e-02],
[-2.3812e-01, -5.1693e-02, -2.0367e-01],
[-2.7840e-02, -2.3342e-01, -2.1178e-02]],
[[ 1.9317e-02, 6.8813e-02, 9.6589e-02],
[-2.0621e-01, 5.7692e-02, 9.0771e-02],
[-1.8107e-01, 8.9298e-02, 2.7473e-02]],
[[-2.7464e-02, 1.4253e-01, 2.7660e-01],
[-1.8457e-01, -2.2295e-02, 2.1370e-01],
[-9.9005e-02, -1.3865e-01, -3.9263e-02]]],
[[[-2.0549e-04, 2.5932e-02, -1.3415e-01],
[-2.1496e-01, -2.0629e-02, -1.8253e-01],
[-2.0626e-01, 6.8932e-02, -2.3308e-01]],
[[-1.0794e-01, -1.4663e-01, -1.6049e-01],
[-2.1212e-01, -1.6226e-01, -2.7378e-01],
[-2.8782e-01, -2.8074e-01, -1.6447e-01]],
[[-1.1500e-01, -9.9738e-02, 5.9111e-02],
[-4.6015e-03, 1.1021e-01, -1.9608e-03],
[-9.1052e-02, 2.6790e-02, 7.9072e-02]],
...,
[[ 1.9596e-01, 1.6191e-01, -8.6930e-02],
[-3.7430e-03, -2.3562e-03, 1.0802e-01],
[-1.5953e-01, -2.3848e-01, -2.8670e-02]],
[[ 1.3047e-01, 3.2880e-02, -1.4517e-01],
[ 1.3083e-01, 5.0817e-02, 5.0530e-02],
[-1.5306e-02, -1.1522e-02, 1.5216e-02]],
[[-6.0879e-02, -6.1926e-02, -1.9709e-01],
[-6.9997e-02, -1.2806e-01, -2.5141e-01],
[ 1.0185e-02, 3.1994e-04, -2.4052e-01]]],
[[[-2.1344e-02, -1.6079e-01, 9.8982e-03],
[-2.4127e-02, -1.8948e-01, -3.2236e-02],
[-1.3980e-01, -2.2053e-01, -8.9034e-02]],
[[-4.1793e-03, -1.5344e-01, -2.4277e-01],
[-4.2296e-02, -8.4506e-02, -1.0980e-01],
[-1.3123e-01, -7.1301e-02, -7.8770e-02]],
[[-1.5964e-02, 1.2957e-01, 2.0793e-02],
[-4.2672e-02, -6.5485e-02, 1.1734e-01],
[ 8.7051e-02, 9.4430e-02, 1.3714e-01]],
...,
[[-2.0361e-02, 9.6373e-02, 5.4046e-02],
[ 1.4813e-02, 5.6324e-02, 9.3778e-02],
[ 1.2685e-02, -2.7207e-02, 1.7389e-01]],
[[ 6.8471e-02, -7.6043e-02, 9.6424e-02],
[ 4.7395e-02, 1.1383e-01, 5.9203e-02],
[ 1.1125e-01, 9.5144e-02, 4.6246e-02]],
[[ 7.7431e-02, -2.3219e-02, -1.9968e-01],
[ 4.9411e-02, 5.4234e-02, -3.3444e-01],
[-3.7287e-02, 4.0487e-02, -3.0120e-01]]],
...,
[[[ 3.4511e-02, -3.5855e-02, -8.6767e-02],
[ 2.3953e-02, 1.2105e-01, -2.5112e-02],
[-6.0123e-02, 1.4529e-02, 1.8618e-01]],
[[-2.3446e-01, -1.9216e-01, 1.3304e-01],
[ 3.7718e-02, 9.4907e-03, 2.1824e-01],
[-6.2784e-02, -2.3477e-01, 4.6091e-03]],
[[ 2.6481e-02, 1.8142e-01, 9.0489e-02],
[ 7.8420e-02, 1.7293e-01, -5.5883e-02],
[ 4.2456e-02, 1.9001e-01, 5.0757e-03]],
...,
[[ 1.4889e-02, 8.3610e-02, -7.7544e-02],
[ 1.6723e-01, 1.3938e-01, 7.3581e-02],
[-1.2384e-01, -6.6178e-03, 4.0996e-02]],
[[-2.9093e-01, -1.0446e-01, -1.6490e-01],
[-2.1380e-01, 2.0932e-02, -2.9634e-01],
[-1.0509e-01, 2.1168e-02, -2.1835e-01]],
[[ 1.7516e-01, 2.4343e-02, -1.6996e-01],
[-1.4015e-01, -5.3434e-02, 6.4022e-02],
[-4.2668e-02, -1.9958e-01, -4.8837e-02]]],
[[[ 1.3037e-01, -2.3090e-03, -3.2762e-02],
[-4.0086e-02, 8.7604e-03, -9.5048e-02],
[ 2.5287e-02, -1.7415e-02, -6.9844e-03]],
[[ 9.3061e-02, 7.1664e-03, 1.2316e-01],
[-5.5210e-02, -1.2144e-01, -1.4433e-02],
[-1.6575e-01, 9.1291e-03, 5.9757e-02]],
[[ 8.0498e-02, -4.3248e-02, -6.5318e-02],
[ 7.4363e-02, 5.9560e-02, -8.1929e-02],
[ 3.6392e-02, -9.0025e-03, -3.0136e-02]],
...,
[[-2.6826e-01, -4.0472e-01, -2.7095e-01],
[ 4.4568e-02, -1.2413e-01, -2.6865e-02],
[ 1.9192e-01, 2.4679e-01, 1.5749e-01]],
[[-1.7790e-01, -2.1425e-02, -1.1925e-01],
[ 1.0908e-01, 1.1921e-01, -5.4665e-02],
[ 4.6016e-02, 2.5744e-02, -7.2850e-02]],
[[-2.6494e-02, 4.8645e-02, 7.8577e-02],
[-1.9410e-01, 1.7393e-02, -1.6910e-01],
[ 2.1852e-02, 1.1434e-01, 1.3467e-01]]],
[[[-2.1439e-02, -1.2860e-01, -3.3354e-02],
[-1.9828e-01, -1.5123e-01, -6.4079e-02],
[ 7.0391e-02, 4.6481e-02, 1.3923e-01]],
[[-9.2065e-02, 2.9923e-02, -1.2463e-01],
[-8.0612e-02, -2.9813e-03, -8.2280e-02],
[-2.9841e-01, -1.9985e-01, -1.8412e-01]],
[[ 1.3510e-01, 1.3553e-01, 1.0014e-01],
[ 1.1736e-01, 4.7019e-02, 8.0113e-03],
[ 1.6550e-01, -2.0160e-02, 1.0073e-01]],
...,
[[-1.5513e-01, -2.8554e-01, -2.1674e-01],
[-2.7626e-01, -3.1343e-01, -2.8633e-02],
[-1.6312e-01, -1.2400e-01, 8.3710e-02]],
[[ 7.6278e-02, 8.5441e-02, 2.6088e-02],
[ 8.8889e-02, 1.3909e-01, -2.8131e-02],
[ 1.2729e-01, 3.3334e-02, -4.8737e-03]],
[[ 1.8940e-01, 1.8810e-02, -1.5500e-01],
[ 6.9878e-03, -7.4199e-02, -3.3896e-02],
[-1.5072e-01, -2.6533e-02, -1.6166e-01]]]], device='cuda:0')),
('conv_layers.2.bias',
tensor([ 0.2907, 0.2309, 0.0646, -0.0624, -0.5818, 0.2733, -0.0341, -0.1980,
-0.0255, -0.1768, -0.3522, 0.0631, 0.1088, -0.0305, -0.0157, 0.0874],
device='cuda:0')),
('conv_layers.5.weight',
tensor([[[[-2.8437e-02, -8.5939e-03, 8.4848e-02],
[-1.4494e-01, 1.4209e-01, 1.6851e-02],
[-1.7729e-02, 8.0643e-02, -1.8047e-02]],
[[-5.8488e-02, -2.6143e-01, -4.7054e-03],
[-3.0788e-02, -2.0346e-01, -3.0580e-02],
[-1.4150e-01, -6.1988e-02, -6.3759e-02]],
[[ 5.5997e-02, -6.7344e-02, 4.6568e-02],
[ 7.3815e-02, -1.6158e-02, -5.3425e-02],
[-1.1837e-01, 1.7483e-02, 1.9649e-02]],
...,
[[ 1.3940e-01, 5.1406e-02, 4.6435e-02],
[ 1.9090e-01, 5.9954e-02, 1.2611e-02],
[ 8.6590e-02, 7.0700e-02, -3.8637e-02]],
[[ 9.3279e-02, -1.5982e-01, -1.0740e-01],
[ 5.1663e-02, -2.4368e-01, 3.8943e-02],
[-2.9324e-01, 1.0030e-02, 4.6086e-02]],
[[ 2.1981e-04, -9.6202e-02, 6.7938e-02],
[-4.6106e-02, -4.2488e-02, 1.2970e-01],
[-7.2652e-02, 1.4365e-01, 2.2541e-01]]],
[[[-6.5784e-02, -6.7720e-02, -1.4863e-01],
[ 7.1033e-03, 1.1360e-01, 1.0912e-01],
[ 5.2771e-02, 8.6185e-02, 5.0102e-02]],
[[-9.3756e-02, -1.5404e-01, -5.5830e-02],
[-1.2100e-01, -3.6218e-02, 1.2825e-01],
[-1.5547e-02, 6.6353e-02, 1.1748e-01]],
[[-4.9989e-02, -1.3760e-01, 1.3018e-01],
[-1.3658e-01, 2.1095e-02, 1.4825e-01],
[-8.7601e-02, 6.2429e-02, 1.8856e-01]],
...,
[[ 1.8084e-01, -1.3729e-02, -1.0221e-01],
[ 1.0390e-01, 4.3451e-02, -7.3634e-02],
[ 1.1551e-01, 1.0442e-01, -9.1103e-03]],
[[ 2.8055e-02, -7.1631e-02, -2.5416e-02],
[-1.5692e-01, -1.5537e-01, -1.7317e-01],
[-2.8629e-02, -7.3144e-02, 7.0745e-02]],
[[ 9.5062e-02, -1.8718e-01, -2.0110e-01],
[ 1.0355e-01, -1.8293e-01, -4.9956e-02],
[-1.9943e-02, -6.0394e-02, 2.8275e-02]]],
[[[ 7.6025e-02, 2.1604e-01, 1.3397e-01],
[-1.5362e-01, 1.1927e-03, 7.7186e-02],
[-1.9590e-01, -3.7798e-02, -3.6094e-02]],
[[-9.5511e-02, -6.6674e-02, 5.2411e-02],
[-5.1126e-02, -3.7572e-02, 4.5444e-02],
[ 2.0657e-03, 1.8016e-01, 2.3774e-01]],
[[-2.1974e-01, -2.0275e-01, -3.6756e-03],
[ 6.1179e-02, 1.6747e-02, 7.5955e-03],
[ 2.1843e-02, 1.0824e-01, 9.2206e-02]],
...,
[[ 7.7463e-02, 2.9190e-02, 9.1800e-02],
[ 1.1192e-01, 7.8744e-03, -7.7384e-02],
[ 9.6636e-02, -1.1431e-01, -2.3164e-01]],
[[-1.6865e-01, -1.5072e-01, -5.3525e-02],
[-2.8595e-02, -1.6435e-02, -7.5654e-02],
[-8.6965e-02, -1.0057e-01, -1.3309e-02]],
[[-1.4483e-01, -6.5377e-02, 8.9519e-02],
[-6.1641e-02, -7.3641e-02, -1.8194e-02],
[ 1.5254e-01, 1.1306e-01, 8.2455e-02]]],
...,
[[[ 1.0196e-01, 2.9415e-01, 1.7580e-02],
[ 1.3073e-01, -1.3788e-02, -2.3153e-01],
[-9.5565e-02, -2.7635e-01, -2.1478e-01]],
[[ 5.2919e-02, 1.2055e-01, 1.2647e-01],
[ 6.1263e-02, 4.7131e-03, -9.6452e-02],
[ 1.1227e-02, -4.5869e-02, -2.1864e-02]],
[[ 6.9171e-02, 6.0733e-02, 1.4136e-01],
[ 6.9235e-02, -2.8415e-02, -1.1156e-01],
[ 6.3824e-03, -3.2937e-02, -9.4563e-02]],
...,
[[-1.3070e-01, -3.8148e-02, 1.0482e-01],
[-5.4710e-02, 1.3543e-01, 1.6672e-01],
[-1.4657e-03, -5.0982e-02, 6.9575e-02]],
[[ 7.1075e-02, 7.5987e-02, 1.0126e-01],
[-2.5326e-02, 6.4861e-02, 1.6204e-02],
[ 9.8559e-02, -1.8882e-01, -1.1637e-01]],
[[ 1.0149e-01, 7.4031e-02, 2.2899e-01],
[ 9.3134e-02, 1.1432e-01, 2.2717e-02],
[-5.0568e-03, -5.7978e-02, -2.4772e-01]]],
[[[-4.3373e-02, 1.1202e-01, 1.0004e-01],
[-1.2427e-01, -1.2717e-01, -1.5723e-01],
[-4.7550e-02, -3.5967e-02, -1.3310e-01]],
[[-4.0545e-02, 6.0561e-02, 7.0649e-02],
[-1.2899e-01, 1.7277e-02, 1.4309e-01],
[-1.1497e-01, 1.5372e-01, 9.3775e-02]],
[[-4.4209e-02, 2.4782e-02, 1.2781e-01],
[-2.7838e-01, 4.2546e-02, 1.5914e-01],
[-2.4934e-01, 1.4663e-01, 1.0130e-01]],
...,
[[-1.2858e-02, 5.8047e-02, 6.3839e-02],
[ 2.8621e-03, 2.8083e-02, 1.5032e-02],
[-3.3942e-02, 7.3668e-02, 4.2530e-02]],
[[ 1.1450e-01, -1.7879e-01, 1.2134e-01],
[-4.8793e-02, -3.2825e-01, -1.9089e-01],
[ 1.3106e-01, -1.4404e-01, -8.9316e-02]],
[[ 3.3212e-02, -2.6778e-01, -1.0538e-01],
[ 4.7291e-02, -2.0402e-01, -7.5387e-02],
[ 2.6178e-03, -8.9834e-02, 9.0147e-03]]],
[[[ 1.0410e-01, -1.5175e-01, -1.2419e-01],
[-2.2621e-01, 1.8569e-03, 1.5876e-01],
[-3.2986e-01, 7.2169e-03, 1.4750e-02]],
[[-3.7051e-01, 4.2474e-02, 1.8489e-02],
[-8.1889e-02, -2.4672e-01, 9.0744e-02],
[-2.1888e-02, -1.4336e-01, 1.8784e-02]],
[[-5.7867e-01, -7.6946e-02, -1.3647e-01],
[-1.3126e-01, -7.3189e-02, 8.8183e-02],
[ 3.3997e-02, 3.4830e-03, 1.2399e-01]],
...,
[[-1.1053e-01, -4.2484e-02, -7.7115e-02],
[ 1.7115e-01, 1.5322e-02, -3.2542e-02],
[ 9.1158e-02, 1.1394e-01, -1.0146e-01]],
[[ 1.8766e-01, 1.8882e-01, 1.4374e-01],
[ 6.3135e-02, -1.7553e-03, -7.2593e-02],
[-1.1917e-01, -3.4238e-01, -3.6201e-01]],
[[-1.3023e-01, -9.4263e-02, -2.6341e-01],
[ 1.0274e-01, -2.3561e-02, -2.4715e-02],
[ 2.8370e-02, -4.0307e-02, 1.1154e-01]]]], device='cuda:0')),
('conv_layers.5.bias',
tensor([-0.0586, 0.2044, 0.2021, -0.0706, 0.1478, 0.2476, 0.0488, -0.1763,
-0.1752, 0.0415, 0.0546, 0.0115, 0.1466, 0.0412, -0.0115, -0.0358,
0.2247, -0.1511, 0.1545, 0.1663, -0.1137, 0.0280, 0.0558, 0.2089,
0.1203, -0.1250, -0.0993, -0.1396, 0.2037, 0.0377, 0.1111, 0.0930],
device='cuda:0')),
('conv_layers.7.weight',
tensor([[[[-0.0319, -0.1395, -0.0192],
[ 0.0020, -0.0543, -0.0894],
[ 0.0974, -0.1260, 0.0095]],
[[-0.1145, -0.0879, -0.0839],
[-0.0515, -0.1162, -0.0564],
[-0.0360, -0.1714, -0.0118]],
[[-0.1628, -0.1109, 0.0303],
[-0.1822, 0.0354, 0.1232],
[-0.1626, -0.0994, -0.0381]],
...,
[[-0.0866, 0.0362, 0.0296],
[ 0.0131, -0.0818, 0.0426],
[-0.0618, -0.0548, -0.0726]],
[[ 0.0343, 0.1913, 0.2256],
[ 0.0618, -0.0508, -0.0074],
[ 0.1602, -0.1909, -0.1212]],
[[-0.1778, 0.1320, 0.1022],
[-0.0095, -0.0931, 0.1807],
[ 0.0973, -0.0164, -0.2249]]],
[[[-0.0635, -0.0659, 0.0174],
[-0.1581, 0.0402, -0.0452],
[-0.0947, -0.1509, -0.1479]],
[[-0.1243, -0.1470, -0.2223],
[-0.0281, -0.0397, 0.0331],
[ 0.0441, 0.0306, -0.0972]],
[[ 0.2219, 0.1747, 0.1806],
[-0.0629, -0.0720, -0.0190],
[ 0.0933, 0.1006, -0.0922]],
...,
[[ 0.0916, 0.1449, 0.1394],
[-0.0220, 0.0563, 0.1120],
[-0.2510, -0.1725, -0.2232]],
[[-0.2421, -0.0079, -0.0041],
[ 0.0476, 0.0645, -0.1961],
[-0.0607, 0.0907, -0.0355]],
[[-0.0036, 0.0882, -0.0145],
[-0.1385, -0.2558, -0.2542],
[-0.0882, 0.0134, -0.1543]]],
[[[-0.0285, 0.0937, 0.0414],
[-0.0023, 0.1534, 0.0230],
[-0.0877, 0.0505, -0.1891]],
[[-0.0810, -0.0368, -0.0127],
[-0.1991, -0.0281, -0.0046],
[-0.3723, -0.1578, 0.0306]],
[[-0.0244, 0.1248, 0.1266],
[-0.0338, 0.1736, 0.1802],
[-0.0682, 0.0369, -0.1003]],
...,
[[-0.1133, -0.3351, -0.1310],
[-0.0324, -0.2598, -0.4871],
[-0.0624, -0.3846, -0.1957]],
[[-0.2646, -0.0429, 0.0731],
[-0.2436, 0.0316, 0.0354],
[-0.1228, 0.1669, 0.0075]],
[[ 0.1980, 0.2097, 0.2765],
[ 0.0581, 0.1065, -0.1276],
[-0.1742, -0.1393, -0.2569]]],
...,
[[[-0.2025, -0.1527, -0.1159],
[-0.2595, -0.1881, -0.1356],
[-0.1887, -0.0808, -0.0933]],
[[-0.1895, 0.0984, 0.0525],
[-0.1539, 0.0656, 0.1390],
[-0.1691, 0.0148, 0.1319]],
[[-0.2232, -0.1306, -0.0138],
[ 0.0844, 0.0318, 0.1722],
[-0.1549, 0.0323, 0.1190]],
...,
[[ 0.0477, 0.0863, -0.0588],
[-0.0019, 0.0265, -0.0291],
[ 0.0141, 0.1355, 0.0036]],
[[-0.1519, 0.0833, -0.1145],
[-0.3509, 0.0082, -0.2133],
[-0.2272, 0.1071, -0.1987]],
[[-0.0792, -0.0994, -0.3562],
[-0.1894, 0.0307, 0.0436],
[-0.2360, -0.2434, 0.0058]]],
[[[-0.0906, -0.1880, -0.1391],
[-0.0646, -0.0589, -0.1817],
[-0.0401, 0.0562, -0.1074]],
[[ 0.0080, -0.2157, -0.1337],
[ 0.1446, -0.1333, -0.0807],
[ 0.1260, -0.0632, -0.3692]],
[[-0.2600, -0.1466, 0.0780],
[-0.0099, -0.0654, 0.0871],
[ 0.0701, -0.2000, -0.1011]],
...,
[[ 0.0476, -0.1730, -0.0762],
[ 0.0120, -0.1330, -0.0271],
[ 0.0781, -0.0447, -0.1026]],
[[-0.1052, -0.0812, 0.2407],
[-0.0712, 0.1693, 0.1575],
[ 0.0705, 0.2339, -0.0694]],
[[-0.2084, -0.0220, -0.1449],
[ 0.1384, -0.1034, 0.3178],
[ 0.1537, 0.0461, -0.0435]]],
[[[-0.3031, -0.0268, 0.0912],
[-0.2049, -0.1516, 0.0196],
[-0.1684, -0.1489, 0.0158]],
[[-0.0044, -0.0206, -0.0348],
[ 0.0729, 0.0486, 0.0561],
[ 0.2299, -0.1294, -0.1981]],
[[ 0.0683, 0.0412, 0.0362],
[ 0.0082, 0.0872, 0.1157],
[-0.0897, -0.1754, -0.0041]],
...,
[[-0.0193, -0.0056, 0.0750],
[-0.1138, 0.0621, 0.1296],
[-0.0705, -0.1301, -0.0561]],
[[ 0.0290, 0.0512, -0.0774],
[-0.0981, -0.0827, -0.0802],
[-0.3140, -0.2010, -0.0554]],
[[ 0.1093, -0.0178, -0.0286],
[-0.1011, -0.1247, -0.1822],
[-0.1260, -0.3032, -0.3333]]]], device='cuda:0')),
('conv_layers.7.bias',
tensor([ 0.1181, 0.1817, 0.0855, 0.0343, 0.1514, -0.1434, 0.1246, -0.1264,
-0.0208, -0.0529, -0.0692, -0.0863, 0.1519, 0.0974, 0.0477, -0.0598,
0.0341, -0.0535, 0.0703, -0.1049, 0.0380, 0.1337, -0.0573, -0.0833,
0.0637, 0.0266, 0.1094, -0.1174, -0.1245, -0.0337, -0.0265, 0.1521],
device='cuda:0')),
('conv_layers.10.weight',
tensor([[[[-0.1856, -0.0745, 0.1620],
[-0.1214, -0.0566, 0.0785],
[-0.1064, -0.0099, 0.1139]],
[[-0.0837, 0.1266, -0.0918],
[ 0.0245, 0.0291, 0.0251],
[-0.0516, 0.0496, -0.0302]],
[[ 0.1089, 0.0329, 0.0732],
[ 0.1054, 0.1181, 0.0433],
[ 0.0581, 0.0333, -0.1391]],
...,
[[-0.0865, 0.1743, -0.0090],
[ 0.0192, 0.1453, -0.0020],
[-0.0587, 0.1341, 0.0719]],
[[-0.2229, 0.0624, 0.1075],
[-0.0116, 0.0300, 0.0878],
[-0.0311, 0.0544, 0.0419]],
[[ 0.1393, -0.0059, -0.0005],
[ 0.1574, 0.1398, 0.0974],
[ 0.0949, 0.0882, -0.1005]]],
[[[ 0.0008, -0.0164, 0.0592],
[ 0.0219, 0.1198, 0.1233],
[ 0.0583, 0.1224, 0.0337]],
[[-0.1985, -0.2563, -0.3434],
[-0.0405, -0.1833, -0.1891],
[ 0.0310, -0.2342, -0.2197]],
[[-0.0395, -0.0914, -0.1111],
[ 0.0414, 0.0403, 0.0263],
[ 0.0380, 0.2010, 0.0209]],
...,
[[-0.2428, -0.0463, 0.0063],
[ 0.0397, 0.0676, 0.1816],
[ 0.0644, 0.0916, 0.1180]],
[[-0.0527, 0.0205, -0.0715],
[ 0.0314, 0.2311, 0.0425],
[ 0.0167, 0.0844, 0.0478]],
[[ 0.1124, 0.1257, -0.0572],
[ 0.1127, 0.0761, 0.0403],
[-0.1084, 0.1779, -0.0052]]],
[[[-0.0084, -0.1134, -0.0067],
[-0.2802, -0.3966, -0.0497],
[ 0.0925, -0.2000, 0.1249]],
[[-0.0534, 0.0465, -0.0304],
[ 0.2654, 0.1405, -0.0182],
[-0.1730, -0.0905, -0.0400]],
[[-0.0513, -0.1679, -0.1100],
[ 0.0318, -0.1751, -0.0024],
[ 0.0715, 0.0258, 0.1275]],
...,
[[ 0.0147, -0.1150, 0.1347],
[ 0.1651, -0.1710, -0.0192],
[ 0.0272, -0.0301, -0.0813]],
[[-0.0781, -0.1142, -0.0896],
[-0.2281, -0.0950, -0.0239],
[ 0.1067, -0.0358, 0.0956]],
[[-0.3088, -0.1006, -0.0720],
[-0.2525, -0.0453, -0.0857],
[-0.1445, -0.0644, 0.2560]]],
...,
[[[-0.0828, 0.1137, -0.0157],
[ 0.1493, 0.2553, 0.0869],
[ 0.0242, 0.0328, -0.2022]],
[[ 0.0835, -0.1897, -0.0418],
[-0.2183, -0.1913, -0.0330],
[ 0.0671, 0.1072, 0.1318]],
[[ 0.0957, 0.1746, -0.0152],
[ 0.0382, 0.2222, 0.0640],
[ 0.0219, -0.0115, -0.1994]],
...,
[[-0.1812, 0.0480, 0.0165],
[-0.1681, 0.1597, 0.0494],
[-0.0808, -0.0109, -0.0600]],
[[ 0.0500, 0.1212, -0.0486],
[ 0.1903, 0.2517, -0.0206],
[ 0.0156, -0.1271, -0.1914]],
[[ 0.2135, 0.1109, 0.1256],
[ 0.1144, -0.0734, -0.1116],
[-0.0598, -0.1967, -0.2520]]],
[[[-0.0859, -0.0618, -0.0627],
[-0.0149, -0.0244, -0.0593],
[-0.1069, -0.0951, -0.0937]],
[[ 0.0181, -0.0469, 0.0025],
[ 0.0201, 0.0091, 0.0069],
[ 0.0508, 0.0310, 0.0368]],
[[-0.0061, 0.0316, -0.0402],
[-0.0143, -0.0289, -0.0907],
[ 0.0245, 0.0089, -0.0682]],
...,
[[ 0.0338, -0.0083, -0.0220],
[-0.0104, -0.0321, 0.0379],
[-0.0045, -0.0372, 0.0008]],
[[ 0.0020, -0.0661, -0.0551],
[-0.0552, -0.0015, -0.0750],
[-0.0690, 0.0088, 0.0023]],
[[-0.0505, -0.0433, 0.0043],
[-0.0975, -0.0239, 0.0037],
[ 0.0047, -0.0746, -0.0097]]],
[[[ 0.0543, 0.0849, 0.1034],
[-0.0473, -0.1811, -0.1569],
[ 0.0156, -0.1269, -0.0319]],
[[-0.1734, 0.0863, 0.0383],
[-0.0627, 0.1199, 0.0075],
[-0.0106, -0.0053, -0.0684]],
[[-0.1319, -0.1651, -0.1023],
[-0.1690, -0.1787, -0.1096],
[ 0.0586, -0.0316, -0.0221]],
...,
[[ 0.0593, 0.1192, 0.0261],
[-0.0374, -0.0944, -0.1924],
[-0.0191, -0.0664, -0.1065]],
[[ 0.0587, 0.0423, -0.0545],
[-0.0925, -0.0740, -0.2023],
[ 0.0950, 0.0504, 0.0386]],
[[ 0.1078, 0.0958, 0.0330],
[ 0.0500, -0.0751, -0.0674],
[-0.1936, -0.1639, -0.0857]]]], device='cuda:0')),
('conv_layers.10.bias',
tensor([ 0.0198, 0.1625, 0.1341, 0.0439, 0.0782, 0.1205, 0.0159, 0.1971,
-0.0412, -0.1252, 0.0389, -0.0317, -0.0179, 0.2281, -0.0367, 0.0181,
-0.0915, -0.0327, 0.1872, -0.1761, -0.0282, 0.1106, 0.2230, 0.1748,
0.2520, -0.1129, 0.0780, 0.1794, 0.0478, 0.0328, -0.1221, 0.0679],
device='cuda:0')),
('conv_layers.12.weight',
tensor([[[[ 4.9508e-02, -2.7186e-02, -1.1208e-01],
[ 1.1738e-01, -5.8499e-02, -1.3976e-01],
[-3.5945e-02, -2.4942e-01, -9.9974e-02]],
[[ 1.1304e-01, -1.3313e-01, -5.9361e-02],
[ 1.2447e-01, 2.0917e-02, -1.7929e-01],
[ 1.9544e-02, -9.7610e-02, -5.0457e-02]],
[[ 2.8588e-02, -1.4804e-01, -1.1242e-01],
[ 6.9131e-02, 1.1287e-01, -1.7460e-02],
[ 4.1049e-02, 1.8998e-01, -1.2257e-01]],
...,
[[ 1.0299e-01, 1.2008e-02, -8.2044e-02],
[ 9.1248e-02, -1.7789e-02, -7.9948e-02],
[ 1.4978e-01, 1.2822e-01, -4.9081e-02]],
[[-4.1654e-02, 3.1937e-03, 3.1249e-02],
[-2.5631e-02, 9.2369e-02, -2.8292e-02],
[ 2.9793e-02, -2.6886e-02, 3.5746e-02]],
[[-2.4026e-01, -4.3634e-02, 2.6212e-01],
[-1.4932e-01, -2.0734e-02, 1.8710e-01],
[-1.9730e-01, -5.4662e-02, 2.7582e-01]]],
[[[ 1.5425e-02, -1.3200e-01, -1.2640e-01],
[-8.3420e-02, -1.5728e-01, 1.1061e-01],
[-1.3874e-01, 1.6367e-02, 8.2508e-02]],
[[-1.0136e-01, -4.6024e-01, -2.2342e-01],
[-5.5587e-02, -1.2816e-01, 7.2486e-03],
[-2.9965e-02, 5.1934e-02, 1.0226e-01]],
[[-1.5513e-01, 8.6901e-02, 1.0828e-01],
[-2.4229e-01, -8.1981e-02, 1.3513e-01],
[-3.7364e-02, -2.3722e-02, -9.9552e-02]],
...,
[[-5.9742e-02, -8.9543e-02, -2.5392e-01],
[ 4.4209e-02, -9.2420e-02, -1.7368e-01],
[-7.7469e-03, -6.3530e-02, -5.2062e-02]],
[[ 4.1098e-02, -7.6429e-02, 8.6607e-03],
[-5.5149e-02, 1.6577e-02, -4.3630e-02],
[-2.2999e-04, 1.8884e-02, -8.9167e-03]],
[[ 1.0878e-02, 9.9666e-02, 6.7072e-02],
[ 7.3624e-02, -7.3989e-03, -1.4116e-02],
[-3.6198e-02, -3.5858e-02, -3.8742e-02]]],
[[[ 5.1092e-02, 1.8709e-02, -6.5976e-02],
[ 3.9679e-02, -2.4347e-01, 2.9743e-02],
[-1.1167e-01, -1.4858e-01, 1.3133e-01]],
[[ 1.3656e-01, -2.0578e-01, -7.8741e-02],
[ 1.7718e-01, -9.7266e-02, -3.1285e-02],
[-8.5512e-02, -5.6795e-02, 4.0030e-02]],
[[-3.4990e-02, -1.0548e-01, -2.1015e-01],
[-9.9969e-02, -6.6486e-02, -1.4267e-01],
[-2.8256e-02, 1.2369e-01, 2.2221e-01]],
...,
[[ 6.8915e-02, 8.5856e-02, 3.9600e-02],
[ 8.7301e-02, 2.1758e-02, -1.0419e-01],
[ 9.4402e-05, -6.6617e-02, -1.6408e-01]],
[[-3.1478e-02, 1.2911e-02, -4.0886e-03],
[-4.7746e-03, 2.9520e-02, 1.4471e-02],
[ 5.1632e-02, -4.7004e-02, 9.7855e-03]],
[[-1.0483e-01, 1.3506e-01, 7.7920e-02],
[ 3.5689e-02, 1.4957e-01, 1.7153e-01],
[ 1.4306e-01, 1.8261e-01, 1.0243e-01]]],
...,
[[[ 1.3773e-02, 6.6518e-02, -2.6743e-02],
[-3.7483e-02, -1.0101e-01, -3.2543e-02],
[-3.8654e-01, -2.7714e-01, -1.7606e-01]],
[[-8.0563e-02, -6.1499e-02, 2.5038e-02],
[-6.4628e-02, 2.5063e-02, 4.7714e-03],
[-2.7798e-01, -2.7645e-01, -1.2831e-01]],
[[-2.3893e-02, -5.6441e-02, -7.0775e-02],
[-1.3502e-01, -1.5983e-01, -2.2376e-01],
[-4.8170e-01, -3.2219e-01, -2.8245e-01]],
...,
[[ 1.0366e-01, 1.2806e-01, 2.8254e-02],
[ 7.6354e-02, 4.7374e-02, -5.5359e-02],
[ 4.2596e-02, -1.2667e-01, -1.2515e-01]],
[[-3.2439e-02, -1.3332e-02, 8.1423e-03],
[-3.5123e-02, 3.8192e-02, -7.4630e-03],
[-2.6248e-02, -4.7801e-02, -1.4686e-02]],
[[-1.1500e-01, -6.2746e-02, 1.1970e-01],
[-2.0461e-02, -1.6197e-01, -1.0665e-01],
[-8.2332e-03, -2.7124e-01, 8.6461e-02]]],
[[[-1.2357e-02, 3.6338e-02, -7.1279e-02],
[ 3.1637e-02, 4.2750e-02, 1.9538e-02],
[ 8.1332e-02, 9.7988e-02, 5.0272e-02]],
[[-4.1695e-02, -1.7693e-02, 3.2868e-02],
[ 1.3931e-01, 3.8371e-02, 7.8947e-02],
[ 8.0027e-02, -3.5676e-03, 1.5097e-02]],
[[-6.7760e-02, -1.5750e-01, -2.5419e-02],
[ 7.2595e-02, -6.9560e-02, 5.9093e-02],
[ 9.3426e-02, -9.5684e-02, -1.9472e-01]],
...,
[[-8.0291e-02, -1.3463e-01, 3.6176e-02],
[-3.9726e-02, -1.4002e-01, -3.5224e-02],
[ 7.0450e-02, 6.5958e-02, 6.3720e-02]],
[[-4.4762e-02, 2.9894e-02, 6.8702e-03],
[ 6.3965e-02, 1.4735e-03, 1.8679e-02],
[ 7.1961e-02, 1.5666e-02, -8.4463e-02]],
[[ 4.2029e-02, 1.6082e-01, -3.9114e-02],
[-6.7966e-02, -7.8375e-02, -5.5173e-02],
[-5.2080e-02, -3.9236e-02, 5.9872e-02]]],
[[[-2.9841e-01, 1.2769e-03, 2.7599e-01],
[-1.6259e-01, -1.6121e-01, -1.6573e-02],
[ 6.4907e-02, -2.7865e-01, -1.0820e-01]],
[[-2.1209e-01, 1.0935e-01, 8.1524e-02],
[-1.4458e-01, 2.2833e-02, 7.8603e-02],
[ 1.6839e-02, -4.1388e-01, -3.3138e-01]],
[[ 2.9373e-01, 1.0740e-01, -1.0300e-02],
[-8.3501e-02, -3.1812e-01, -1.7881e-01],
[-1.4691e-01, -2.2622e-01, -1.5365e-02]],
...,
[[-1.4152e-01, -1.5073e-01, 3.4692e-02],
[-2.2660e-02, 1.8300e-01, 5.1822e-02],
[ 1.3174e-01, 5.6258e-02, -1.0039e-01]],
[[-2.4813e-02, -9.0552e-03, 3.3542e-02],
[-2.5286e-02, 1.7340e-02, -3.6346e-02],
[-2.1361e-02, 5.5996e-02, -4.9120e-02]],
[[ 9.7077e-02, -1.0297e-01, -1.9890e-01],
[-7.6801e-02, -2.9136e-02, 3.2375e-03],
[-2.3035e-01, -4.1233e-02, -1.8898e-01]]]], device='cuda:0')),
('conv_layers.12.bias',
tensor([ 0.0158, -0.0411, -0.0280, 0.0512, 0.0165, -0.0493, -0.0611, -0.1076,
0.0397, -0.0991, 0.2028, -0.0713, 0.2240, 0.0256, 0.1016, 0.1453,
-0.0538, -0.0989, 0.0317, -0.0194, 0.0769, 0.0641, -0.0447, 0.1152,
-0.0938, -0.0668, -0.1518, 0.1277, -0.0662, 0.0791, 0.0933, 0.0816],
device='cuda:0')),
('fc_layers.0.weight',
tensor([[-0.0016, -0.0539, -0.0319, ..., -0.1360, -0.3227, -0.0732],
[-0.0429, 0.0963, -0.0807, ..., -0.1543, -0.1253, 0.1133],
[-0.0548, -0.0074, -0.1488, ..., -0.3646, -0.0495, 0.1091],
...,
[-0.0498, -0.0729, 0.0416, ..., -0.0954, -0.0154, -0.0895],
[ 0.0937, 0.1295, -0.0863, ..., -0.0838, 0.1223, -0.0073],
[-0.0164, 0.0218, -0.1404, ..., -0.0476, -0.1108, -0.0968]],
device='cuda:0')),
('fc_layers.0.bias',
tensor([ 7.2826e-02, 5.4358e-02, 2.5934e-01, 2.2788e-01, 1.3248e-01,
1.0481e-01, 4.3507e-02, 2.2472e-01, -1.0593e-02, 1.7815e-01,
1.0856e-01, -2.2528e-02, -1.6292e-01, 4.5420e-02, -7.9497e-02,
2.9828e-01, 9.3863e-02, 6.3144e-03, 3.1334e-02, -4.1996e-03,
2.7098e-02, -1.7850e-02, 2.7422e-02, -9.8286e-02, 2.4514e-02,
-8.2757e-02, -6.4133e-02, -5.0845e-02, -1.1522e-01, -6.5101e-02,
1.3406e-01, 2.2623e-01, -7.9561e-02, 2.2933e-02, -5.0414e-02,
1.4450e-01, -8.8381e-03, 1.3522e-01, -4.6344e-03, -8.3455e-02,
1.9880e-02, -4.5854e-02, -6.5084e-02, -3.9354e-02, -5.2033e-02,
-6.3425e-02, -4.4866e-02, -4.5927e-02, 7.5468e-02, -7.1798e-02,
-4.6727e-02, 1.0235e-02, -5.1324e-02, -1.5371e-02, -8.4088e-02,
-3.2835e-02, 1.0477e-01, 3.6323e-03, -7.6309e-02, -6.0538e-02,
-2.1267e-02, -4.2861e-02, -3.6886e-02, -3.6463e-02, -5.3250e-02,
-4.6613e-02, 1.6153e-01, -7.3572e-02, -3.2023e-02, -3.7439e-02,
6.0361e-02, 1.3322e-02, 1.0068e-01, -7.5418e-02, -1.0577e-01,
-8.7959e-02, -1.5813e-02, -9.8309e-02, -1.3455e-02, 1.2062e-02,
-1.5226e-02, -3.8085e-05, -4.8615e-02, -7.3362e-02, -4.7257e-03,
1.2063e-01, -8.4020e-02, -3.5735e-02, -3.6193e-02, 4.9845e-02,
-1.8023e-02, -5.8292e-02, -3.0717e-02, -1.2037e-01, -9.4802e-03,
1.4043e-02, -3.3630e-02, -4.7547e-02, -3.7566e-02, 1.9653e-01,
-1.1831e-01, 5.0586e-02, -6.5239e-02, -1.4689e-01, -7.3368e-02,
-5.9380e-02, -5.5291e-02, 9.0621e-03, -5.2641e-02, 4.2505e-02,
-3.0269e-02, -8.4752e-02, -4.8303e-02, 2.5083e-02, 2.3348e-02,
-4.5128e-02, 7.1451e-03, -6.4154e-02, 1.5724e-01, 4.9253e-02,
4.2546e-02, 1.7056e-01, -2.8929e-02, -6.4913e-02, -5.5791e-03,
-3.4958e-02, -2.9529e-02, -6.9925e-02, 1.0842e-02, 7.5709e-03,
1.1651e-01, -6.1057e-02, -1.3350e-01, 1.1377e-01, -3.9497e-04,
7.2806e-03, -1.4204e-02, -3.3130e-02, -6.1426e-02, -5.9254e-02,
7.7758e-02, -8.3545e-02, 9.1363e-03, -8.8963e-02, 1.2635e-01,
2.7286e-01, 3.2067e-02, 2.0038e-02, 8.6933e-02, 5.7805e-02,
-4.9306e-02, -1.2796e-01, -2.4214e-02, 1.1817e-01, 7.4551e-02,
1.8312e-01, 1.4170e-01, 6.7097e-02, -5.4291e-02, -3.1333e-02,
-9.0007e-03, 8.1268e-02, 1.0517e-01, -2.2305e-02, -1.3134e-02,
-7.1497e-02, 5.9857e-02, 2.7234e-02, -4.2727e-02, -9.0289e-03,
-3.0237e-02, -8.6051e-02, -1.3361e-02, 6.8699e-02, 8.5107e-02,
-4.6660e-02, -9.3117e-02, 2.1992e-02, 4.0605e-02, 1.3154e-01,
-6.2107e-02, -7.8513e-03, 9.4037e-02, -2.4453e-02, -4.0165e-02,
4.5025e-02, -6.4519e-02, -1.2243e-01, 8.9524e-02, -3.1919e-02,
-4.0802e-02, -3.5863e-02, -3.4394e-02, -4.5717e-02, 1.6192e-02,
-5.8785e-02, -2.8212e-02, -7.9113e-02, -1.0866e-01, 4.0478e-02,
-1.2185e-01, -6.0640e-02, -2.4209e-01, 1.0703e-01, -4.2650e-02,
1.5277e-01, -1.1554e-02, -6.6123e-02, -4.8213e-02, 5.0187e-02,
2.1357e-01, 1.9142e-02, -6.8260e-02, 1.3288e-02, 1.0011e-01,
-1.0657e-01, -9.1037e-02, -3.4588e-02, 1.0343e-01, 3.6173e-02,
2.6462e-02, 2.0453e-02, 6.7885e-02, 1.4254e-01, 4.5103e-03,
5.5312e-02, 1.3168e-01, -1.0533e-01, -7.8836e-03, -1.3214e-02,
2.6199e-01, -4.3035e-02, 4.1927e-02, 2.4751e-02, 3.2501e-02,
-2.7232e-02, 8.5551e-02, 1.3845e-02, 9.0417e-02, -6.4853e-03,
1.1590e-01, 1.2049e-01, 1.1480e-01, -1.8301e-02, -9.5441e-02,
-3.3337e-02, -5.7212e-02, -4.6284e-02, 1.0911e-01, 5.9290e-02,
1.0697e-01, -1.6208e-01, -2.8557e-02, -7.3264e-02, -2.1819e-02,
-1.7206e-02], device='cuda:0')),
('fc_layers.2.weight',
tensor([[ 0.0698, 0.1535, -0.0429, ..., -0.0200, -0.0918, 0.2343],
[ 0.1791, 0.0036, 0.1145, ..., -0.0724, -0.0174, -0.1737],
[-0.0813, -0.1107, 0.1586, ..., -0.0519, -0.0592, -0.0212],
...,
[-0.1751, 0.0791, 0.0212, ..., 0.0006, 0.2040, -0.1856],
[-0.0042, 0.0353, -0.1872, ..., -0.0544, -0.0819, -0.0052],
[-0.1231, -0.1340, 0.0155, ..., -0.0588, 0.0816, 0.0030]],
device='cuda:0')),
('fc_layers.2.bias',
tensor([-0.1195, 0.0157, 0.2840, 0.0887, 0.0617, -0.0885, -0.1069, -0.1549,
-0.0367, -0.1709], device='cuda:0'))])},
{'ratio': 0.58,
'bias': 128,
'train_losses': [282.2087046051525,
231.50724470511364,
196.27533884031817,
179.60311457187095,
169.23664782392626,
163.97707192123872,
158.92087912247442,
155.5871634462533,
151.94721357901474,
149.85732750784456,
148.3997749954618,
147.59593006859276,
145.80999682690253,
144.57018808003704,
142.42307142354224],
'test_losses': [257.6376896278531,
205.74569428200815,
181.24003953559725,
171.24065706776636,
164.48754103978476,
156.92090947955262,
152.29041487095404,
150.6063645400253,
145.48198465272492,
145.0580674386492,
143.36822134840722,
140.51724107125227,
142.045713448057,
137.9952088804806,
140.9035414246952],
'model_state_dict': OrderedDict([('conv_layers.0.weight',
tensor([[[[-0.0398, -0.2994, -0.2630],
[ 0.1666, -0.1104, 0.0613],
[ 0.3382, 0.0799, 0.0201]],
[[-0.2111, -0.2954, -0.1148],
[ 0.0215, 0.1856, -0.0588],
[ 0.3058, 0.0717, -0.0154]],
[[ 0.0015, 0.0134, 0.0510],
[ 0.1144, -0.1325, -0.0215],
[ 0.0143, 0.1481, -0.0114]]],
[[[ 0.0672, 0.0013, -0.1059],
[ 0.2523, 0.0988, 0.0310],
[ 0.0057, -0.0528, -0.2450]],
[[ 0.0669, -0.1199, -0.1902],
[ 0.3219, 0.2062, -0.2107],
[ 0.1414, 0.0059, 0.0444]],
[[-0.1299, -0.0343, -0.1227],
[ 0.1964, -0.0540, -0.1586],
[ 0.2052, -0.0202, -0.1254]]],
[[[ 0.0917, 0.2781, 0.0373],
[ 0.0745, 0.1009, 0.1068],
[-0.0998, -0.2591, -0.1016]],
[[ 0.2352, -0.0898, 0.1544],
[-0.1961, -0.2909, -0.2137],
[-0.1962, -0.0298, -0.0165]],
[[ 0.0417, -0.0670, 0.1875],
[-0.0806, 0.1593, 0.0789],
[ 0.1408, -0.0008, -0.0918]]],
[[[-0.0451, -0.1853, -0.0942],
[ 0.2663, -0.0021, 0.0859],
[-0.0882, -0.1650, 0.0882]],
[[-0.1332, 0.1202, 0.0089],
[ 0.0083, 0.0767, 0.1326],
[ 0.0661, 0.0505, -0.1714]],
[[-0.0608, 0.1090, -0.0008],
[ 0.0978, 0.0650, -0.0388],
[-0.0193, -0.0254, -0.0280]]],
[[[-0.0359, 0.2151, 0.3059],
[-0.0865, -0.1276, -0.0682],
[-0.1628, -0.1538, 0.0149]],
[[ 0.1756, 0.0050, 0.2417],
[-0.2301, -0.1716, 0.1976],
[-0.1564, -0.0298, 0.1206]],
[[ 0.1556, -0.0049, 0.2198],
[ 0.0562, -0.2358, 0.1079],
[-0.0312, -0.2301, -0.1147]]],
[[[-0.0036, -0.2201, -0.3657],
[ 0.0794, -0.0659, -0.1618],
[ 0.2809, 0.0967, -0.0405]],
[[-0.1026, -0.0444, 0.2034],
[ 0.1315, 0.0520, -0.0609],
[ 0.0242, -0.1076, 0.1109]],
[[ 0.1232, 0.0824, 0.0157],
[-0.0487, -0.0976, -0.0779],
[ 0.0920, -0.1368, 0.1547]]],
[[[-0.1713, -0.1498, 0.0604],
[-0.1367, 0.0570, 0.1576],
[ 0.0286, -0.0479, 0.2558]],
[[-0.2258, -0.3000, -0.1275],
[-0.1700, 0.1643, 0.1883],
[ 0.0172, 0.0968, 0.2643]],
[[-0.0207, -0.0210, -0.1029],
[-0.0200, 0.0563, 0.0776],
[ 0.1702, -0.0065, -0.0742]]],
[[[ 0.3190, 0.1574, -0.0031],
[-0.0397, 0.0994, -0.2348],
[-0.1037, -0.2677, 0.0571]],
[[ 0.0745, -0.0219, 0.0501],
[ 0.0943, 0.1257, -0.1592],
[ 0.0148, -0.2888, -0.0180]],
[[ 0.2469, 0.1935, -0.0388],
[ 0.2201, -0.1660, -0.0197],
[-0.0696, -0.1028, -0.1106]]]], device='cuda:0')),
('conv_layers.0.bias',
tensor([ 0.2088, 0.2395, 0.0674, 0.2705, 0.2078, -0.5102, 0.1386, 0.1438],
device='cuda:0')),
('conv_layers.2.weight',
tensor([[[[-0.0945, -0.2206, 0.0624],
[-0.1962, -0.0908, 0.0319],
[-0.0324, 0.0535, 0.0609]],
[[-0.1375, -0.2325, 0.0445],
[-0.3040, -0.0526, -0.0591],
[-0.2815, 0.0021, 0.1139]],
[[ 0.0277, -0.1067, -0.1194],
[-0.0298, -0.1288, -0.0584],
[ 0.0672, 0.0854, 0.0579]],
...,
[[-0.1474, -0.2150, 0.1044],
[-0.3021, -0.0370, 0.1346],
[-0.3292, 0.0206, 0.0926]],
[[ 0.1036, 0.2160, 0.1155],
[ 0.1732, 0.0989, 0.0450],
[ 0.0604, 0.0479, -0.0479]],
[[-0.0359, -0.3452, 0.0184],
[-0.1891, -0.3283, -0.0466],
[-0.2356, -0.0484, 0.0520]]],
[[[ 0.0080, 0.0429, 0.1072],
[-0.1453, 0.0991, 0.1505],
[-0.1037, -0.0285, -0.0864]],
[[-0.1029, -0.1472, -0.0458],
[ 0.1201, 0.1155, -0.0015],
[ 0.2169, 0.0984, 0.0613]],
[[-0.0196, -0.0469, -0.1256],
[ 0.0110, -0.1825, -0.0135],
[-0.1004, -0.1198, 0.0719]],
...,
[[-0.2060, -0.3041, -0.4576],
[-0.1718, -0.0841, -0.0811],
[-0.2733, -0.1808, -0.1245]],
[[ 0.0685, 0.0762, -0.2141],
[ 0.0805, -0.1095, -0.2134],
[ 0.0432, -0.1066, -0.3827]],
[[-0.2397, -0.1345, -0.0474],
[ 0.0744, -0.0555, 0.0223],
[ 0.1578, 0.0007, -0.1450]]],
[[[-0.1282, -0.0779, -0.0530],
[-0.3009, -0.3709, -0.2196],
[-0.0040, -0.1451, -0.2375]],
[[ 0.0560, 0.0850, 0.2196],
[-0.1938, -0.2076, 0.0327],
[-0.1627, -0.4663, -0.1222]],
[[-0.0407, -0.0841, 0.0348],
[ 0.1002, -0.0388, -0.1491],
[ 0.0845, 0.2629, -0.0155]],
...,
[[-0.1303, 0.1311, 0.0121],
[-0.2999, -0.2416, -0.1011],
[-0.2276, -0.2894, -0.1345]],
[[ 0.0279, -0.0513, 0.1033],
[-0.0741, -0.2308, -0.1175],
[-0.1316, -0.1651, -0.0417]],
[[-0.0830, -0.0139, 0.0385],
[-0.1869, -0.0650, 0.0010],
[-0.0548, 0.0691, 0.0205]]],
...,
[[[-0.0274, 0.0942, 0.0401],
[-0.0565, -0.2013, -0.1213],
[-0.0738, -0.1240, 0.0502]],
[[ 0.2278, 0.2531, 0.1193],
[ 0.0314, -0.0190, -0.1173],
[ 0.1467, -0.1405, -0.1659]],
[[-0.1503, 0.1137, -0.0131],
[ 0.0959, 0.1128, 0.0038],
[ 0.0171, 0.0819, 0.0685]],
...,
[[-0.0966, 0.0906, -0.1475],
[ 0.0231, -0.1272, -0.1872],
[-0.1981, -0.3275, -0.3358]],
[[ 0.0297, -0.3875, -0.0933],
[-0.5802, -0.5114, -0.0755],
[-0.5927, -0.0579, 0.2814]],
[[-0.0274, 0.1790, 0.2548],
[ 0.0479, 0.1179, 0.0456],
[ 0.2383, 0.0865, 0.0987]]],
[[[ 0.0288, -0.0492, -0.0547],
[-0.0736, -0.1611, -0.0603],
[-0.0444, 0.0117, 0.0515]],
[[ 0.0480, 0.0231, 0.0743],
[ 0.1615, 0.1582, 0.0962],
[ 0.1347, 0.1408, -0.0330]],
[[ 0.1393, 0.0592, 0.0920],
[ 0.0442, 0.1963, 0.1393],
[-0.1244, 0.0427, -0.0447]],
...,
[[ 0.0339, -0.0187, 0.1635],
[ 0.1579, 0.0554, 0.0597],
[ 0.1166, 0.0895, -0.0321]],
[[ 0.0354, 0.2479, 0.1035],
[ 0.1871, 0.2290, 0.1013],
[ 0.0259, 0.1446, 0.1841]],
[[ 0.1455, 0.1470, 0.0769],
[ 0.0709, -0.0325, 0.0416],
[-0.1264, -0.0901, -0.0933]]],
[[[-0.0058, -0.0497, 0.0415],
[ 0.0217, 0.0925, -0.0768],
[ 0.1423, 0.2451, 0.1302]],
[[-0.0521, 0.0014, -0.1553],
[ 0.0123, -0.0735, -0.1996],
[ 0.0159, 0.1830, 0.2200]],
[[ 0.0371, 0.0318, -0.0154],
[-0.0121, 0.0602, 0.1711],
[-0.2979, -0.2832, -0.2304]],
...,
[[-0.1239, -0.0854, -0.0324],
[-0.0840, -0.0662, -0.0113],
[-0.0501, 0.0440, 0.1080]],
[[-0.0064, -0.0440, 0.1076],
[-0.1319, 0.1156, 0.1006],
[ 0.2347, 0.2617, 0.0933]],
[[-0.0243, 0.0682, -0.1218],
[-0.0500, -0.0301, 0.0142],
[-0.3044, -0.3147, -0.0084]]]], device='cuda:0')),
('conv_layers.2.bias',
tensor([ 0.0911, 0.1054, 0.2704, 0.1824, -0.0303, 0.0825, -0.4310, -0.4812,
-0.0317, 0.1370, 0.0795, 0.1403, 0.1965, -0.0217, -0.5327, -0.1317],
device='cuda:0')),
('conv_layers.5.weight',
tensor([[[[-1.3910e-01, -5.0627e-01, -1.7379e-01],
[-4.4124e-02, -3.6121e-01, -2.9900e-01],
[-7.6868e-02, -1.4989e-01, -5.1622e-01]],
[[-2.1973e-02, -1.0838e-01, -2.5899e-01],
[ 1.2509e-01, 1.1911e-01, -2.9234e-02],
[ 1.4460e-01, 1.3739e-01, 2.9016e-02]],
[[ 1.4437e-01, 1.7089e-02, 1.1620e-01],
[-1.1745e-01, -4.3649e-02, -3.7902e-02],
[-1.3579e-01, -6.2856e-03, -5.9901e-02]],
...,
[[-5.9430e-02, -1.2932e-01, -8.3771e-02],
[-1.8730e-01, -2.4256e-01, 3.2695e-02],
[-4.9317e-03, -4.5458e-02, -1.2531e-01]],
[[-1.0422e-01, 4.7558e-02, 2.3365e-01],
[-2.4389e-01, -1.6415e-01, -1.0668e-02],
[-2.5474e-01, -1.3867e-01, -6.0333e-03]],
[[-9.6220e-02, -9.7969e-02, -1.3171e-01],
[-2.0373e-03, -1.4528e-01, -7.4217e-02],
[ 7.8661e-03, -1.8542e-01, -8.1266e-02]]],
[[[-1.9396e-01, 9.6317e-02, 1.0370e-01],
[-4.2046e-02, 1.0091e-01, 2.3052e-01],
[-1.1969e-02, 1.3578e-01, 4.8303e-02]],
[[ 9.5200e-02, -1.3441e-02, 4.7679e-02],
[ 1.4598e-01, -7.9125e-02, -3.3967e-02],
[ 7.5940e-02, 2.4255e-04, -1.1030e-01]],
[[ 1.1430e-02, 5.8110e-02, 2.1613e-01],
[-2.4143e-01, 2.6759e-02, 7.3130e-02],
[-2.0131e-01, -2.6582e-02, 2.9318e-02]],
...,
[[ 1.6797e-03, -9.0755e-02, -2.1240e-01],
[ 8.2676e-02, -1.6195e-01, -4.8542e-02],
[ 1.9527e-02, -2.3986e-01, -1.7598e-01]],
[[-1.1516e-01, -1.0382e-01, 1.3748e-01],
[-9.6721e-02, -1.5509e-01, 1.9399e-01],
[-8.9090e-02, -1.2637e-01, 5.4031e-02]],
[[-5.0180e-02, -4.0923e-01, -7.0171e-02],
[ 3.2534e-02, -1.9713e-01, -1.8559e-01],
[ 5.5655e-02, 2.7804e-02, 5.6595e-02]]],
[[[-1.6973e-01, 9.1448e-03, 1.3002e-01],
[-5.5329e-02, 8.0644e-02, 2.1161e-01],
[-4.1430e-01, -1.9383e-01, -1.8772e-01]],
[[ 1.3967e-01, -5.2479e-02, 8.9646e-02],
[-6.1392e-02, -1.9769e-01, 4.6407e-02],
[-3.5524e-01, -3.8766e-01, -1.8534e-01]],
[[ 6.0224e-04, 2.6381e-02, 1.0533e-01],
[-5.8640e-02, 1.9624e-01, 1.6745e-01],
[-1.9652e-01, 4.0089e-02, 6.0910e-02]],
...,
[[-3.9934e-02, -1.4698e-01, -1.8469e-01],
[ 5.7415e-02, 7.3266e-02, 1.2120e-03],
[-1.8545e-02, -9.0943e-02, 1.2959e-01]],
[[-1.4756e-01, -3.7881e-02, 2.0765e-01],
[-7.0976e-02, -9.7869e-02, -1.7208e-02],
[-9.7734e-02, -2.7145e-01, 7.5345e-02]],
[[-4.4496e-01, -6.4654e-01, -1.3962e-01],
[ 1.2230e-01, 3.3809e-02, 1.2789e-01],
[-3.7122e-02, 2.3352e-02, 7.6667e-02]]],
...,
[[[-1.9821e-02, -1.3526e-02, -8.6396e-03],
[-6.4783e-02, -9.5251e-03, 2.9059e-03],
[ 1.6360e-02, -1.6678e-01, 2.6202e-02]],
[[ 4.7249e-02, -1.5015e-01, 4.9198e-03],
[-4.5352e-04, 2.4292e-02, 1.3976e-01],
[ 1.4201e-01, 6.3382e-02, 7.2576e-02]],
[[-4.8546e-03, 4.6641e-02, 1.8791e-01],
[-1.8572e-03, -1.1156e-02, 7.3233e-02],
[-3.3246e-02, 5.4795e-02, 2.1507e-01]],
...,
[[-7.5031e-02, -1.6301e-01, -4.9285e-02],
[-7.2882e-02, -1.0648e-01, -2.1643e-02],
[-3.2841e-01, -1.7281e-01, 9.5281e-02]],
[[-1.2132e-01, -1.9128e-02, -4.9403e-02],
[-1.2546e-01, -1.7526e-01, 7.1450e-02],
[-1.4289e-02, -1.6637e-01, 9.2483e-02]],
[[-2.5868e-01, -2.1076e-01, 3.0430e-02],
[-3.8692e-01, -3.8490e-01, -2.7791e-01],
[-5.0707e-02, -3.3768e-01, -1.9464e-01]]],
[[[-1.7498e-01, -1.3625e-01, 5.3232e-02],
[-1.8974e-01, 5.0017e-03, 1.8438e-01],
[ 6.3314e-03, 1.8640e-01, 2.5479e-03]],
[[ 1.0732e-01, -5.3464e-02, -1.6119e-01],
[ 5.3803e-02, 1.2293e-01, -1.0827e-01],
[ 1.2645e-01, 7.6289e-02, 7.7300e-02]],
[[-2.2606e-02, -7.6029e-02, 7.9104e-02],
[-1.0550e-01, -3.0672e-01, 1.2873e-01],
[-8.6318e-02, 4.6665e-03, 4.4825e-03]],
...,
[[-5.1560e-02, -7.6638e-02, -4.4974e-01],
[-7.4006e-02, -2.1610e-01, -2.3958e-01],
[-1.1561e-01, -3.3152e-01, -9.5092e-02]],
[[-1.1781e-01, -3.9691e-02, -4.2305e-02],
[-2.1989e-01, -2.5179e-01, -2.4595e-01],
[-3.7908e-01, -3.7804e-01, -3.4727e-01]],
[[-1.7345e-01, -5.0523e-02, -3.7805e-02],
[-1.4751e-01, -3.6606e-02, -1.0527e-01],
[ 1.4235e-02, -1.3664e-01, -1.3825e-01]]],
[[[-2.4124e-02, -1.0050e-01, -2.8016e-02],
[ 3.2306e-02, -1.3603e-02, -3.3029e-01],
[ 7.8659e-02, 1.3708e-02, -2.0462e-01]],
[[-4.9455e-02, 1.5615e-01, 1.0830e-01],
[ 5.2468e-02, 1.4948e-01, 1.1993e-01],
[-7.0335e-03, 4.6454e-02, 9.8936e-02]],
[[ 9.0344e-02, -1.4881e-01, -4.0227e-01],
[ 2.5000e-01, 7.0799e-02, -1.3588e-01],
[ 1.5592e-01, 1.8243e-02, -2.4157e-02]],
...,
[[-2.7037e-01, -3.6140e-01, -1.5269e-01],
[-1.8872e-01, -1.5884e-01, 7.8085e-03],
[ 5.4835e-02, 5.1399e-02, -1.3784e-02]],
[[-1.7784e-01, -1.8634e-01, -2.4585e-01],
[-1.6113e-01, -2.1812e-01, -3.3195e-01],
[ 2.8209e-01, -1.1211e-02, -4.2309e-01]],
[[-1.0299e-01, -1.7140e-01, -2.0042e-01],
[-8.8924e-03, -2.8373e-01, -3.1609e-01],
[ 8.1651e-02, -1.9849e-01, -2.7618e-01]]]], device='cuda:0')),
('conv_layers.5.bias',
tensor([ 0.0110, 0.0305, 0.0273, 0.1834, -0.0650, -0.0593, -0.2615, -0.0041,
-0.0223, -0.0794, -0.1824, 0.1275, -0.0070, 0.1832, -0.0165, 0.0151,
0.2512, -0.1408, 0.0782, -0.0491, -0.0783, 0.2401, 0.0169, 0.0110,
-0.0215, 0.1160, 0.3456, -0.0836, -0.1691, 0.2567, 0.2088, 0.3028],
device='cuda:0')),
('conv_layers.7.weight',
tensor([[[[ 1.4095e-01, 2.1726e-02, 2.6242e-02],
[ 1.5609e-01, 5.8397e-02, -1.2679e-01],
[ 5.8346e-02, -1.5134e-01, -2.3724e-01]],
[[ 3.3754e-02, 4.9319e-02, 2.3164e-02],
[-7.4317e-02, -3.2700e-02, -1.2491e-02],
[ 6.5495e-02, -2.9745e-02, 1.6376e-02]],
[[-2.4491e-01, -1.6261e-01, 2.2099e-02],
[-6.5012e-02, -6.1915e-02, -1.4705e-01],
[ 2.1830e-02, -1.4145e-01, -7.8632e-02]],
...,
[[-2.2989e-01, -1.9328e-01, -7.4755e-02],
[-2.8442e-03, -1.3033e-01, -3.6785e-02],
[ 1.3426e-01, -1.3835e-03, 2.4317e-02]],
[[ 1.7735e-01, 1.7954e-01, 7.0418e-02],
[ 8.3625e-02, 4.7892e-02, 2.3369e-03],
[-1.6244e-02, -6.7104e-02, -1.1491e-01]],
[[-1.9467e-02, -5.4452e-02, -4.1050e-02],
[ 3.2895e-02, 6.8860e-02, -1.1685e-02],
[ 1.8949e-02, 3.2119e-02, 3.8073e-02]]],
[[[-6.1836e-02, -1.9042e-01, -4.5538e-02],
[-1.3917e-01, -1.2843e-01, -1.0136e-01],
[-7.5006e-02, -8.8919e-02, -1.6642e-02]],
[[-6.2668e-02, -1.0888e-01, -5.0425e-02],
[-7.4130e-02, -3.7578e-02, -1.6885e-01],
[ 4.5764e-02, 9.8903e-03, -1.4774e-01]],
[[-1.5540e-01, -1.0115e-01, -9.5865e-02],
[-3.1112e-01, -1.6352e-01, 1.2996e-01],
[-2.5578e-01, -1.2040e-01, 1.5778e-01]],
...,
[[-4.2764e-01, -1.8252e-01, -5.3818e-02],
[-1.7784e-01, -1.3008e-01, -1.1141e-02],
[-1.1524e-01, -1.9595e-01, -2.9668e-01]],
[[-1.3709e-01, -1.2899e-01, -4.2147e-01],
[-1.0486e-01, 5.1136e-02, -1.6937e-01],
[-3.1238e-01, -2.2187e-01, -6.2385e-02]],
[[-1.1859e-01, -4.0560e-02, -2.5686e-01],
[-4.0894e-03, -5.5053e-02, -1.9503e-01],
[ 2.0628e-02, 3.9994e-02, -1.3672e-01]]],
[[[-2.4083e-02, -5.6350e-02, -4.0480e-02],
[-1.0791e-01, -5.1373e-02, -2.7758e-02],
[-7.7851e-02, -5.8409e-02, -3.4570e-02]],
[[-8.2016e-02, -1.1950e-01, -1.4700e-02],
[-1.2033e-02, -4.5693e-02, -5.4755e-02],
[-3.3067e-02, -1.4830e-01, -2.9708e-02]],
[[ 1.4329e-02, 3.7573e-03, 2.9205e-02],
[-5.8305e-02, -8.3785e-04, -1.3580e-02],
[-7.2231e-02, 3.5636e-02, -6.8251e-02]],
...,
[[ 2.9749e-03, -1.1248e-01, -1.1440e-01],
[-7.9045e-02, -1.0269e-01, -4.9670e-02],
[-8.1366e-03, -5.3573e-02, -7.2651e-02]],
[[-5.6076e-02, -4.9165e-02, 5.7913e-02],
[-4.7450e-02, -8.1164e-02, 7.8774e-03],
[-8.8987e-02, -1.3924e-02, -1.5870e-02]],
[[-9.9608e-02, 2.7831e-03, -3.2289e-02],
[-8.2381e-02, -3.1941e-02, -2.6981e-02],
[-1.5967e-02, 3.1644e-02, -3.2378e-02]]],
...,
[[[-1.1542e-01, 1.6844e-02, 4.7652e-02],
[ 6.6105e-02, 7.5294e-03, 3.7075e-02],
[-1.5782e-01, -7.9190e-02, 1.0499e-01]],
[[-2.2417e-01, -1.4917e-01, -6.3683e-02],
[-2.7171e-01, -1.8580e-01, -8.6178e-02],
[-7.7094e-02, 8.1274e-03, -3.5074e-02]],
[[-1.6537e-01, -8.7702e-02, -1.4567e-01],
[ 7.5802e-02, 1.7104e-02, 1.9602e-01],
[ 1.0472e-01, 7.0219e-02, -1.3588e-01]],
...,
[[ 4.9002e-02, -6.6350e-03, -9.0734e-02],
[ 1.5746e-01, 1.1913e-02, -1.7617e-01],
[-2.8121e-03, 5.9329e-04, -1.0922e-01]],
[[ 4.5884e-02, 1.0912e-01, 3.5023e-01],
[ 2.2998e-02, -1.2101e-02, 1.3201e-01],
[ 1.0052e-01, -1.6301e-01, -6.5725e-03]],
[[-1.0457e-01, -1.5221e-01, -1.1623e-01],
[ 1.4684e-02, -9.8997e-02, -7.4220e-02],
[-2.7458e-02, -3.8079e-02, -1.8629e-02]]],
[[[ 2.2300e-01, 6.7394e-02, 1.5200e-01],
[-6.3797e-02, 5.0482e-02, 1.2503e-01],
[-1.3497e-01, -4.2654e-02, 2.1317e-01]],
[[ 1.4567e-01, -1.0319e-02, -9.0630e-02],
[-1.9568e-01, 1.8394e-03, -1.7759e-02],
[-4.1830e-02, -1.3328e-01, -3.8037e-02]],
[[ 3.9369e-02, -7.0099e-02, -1.3699e-01],
[-1.2463e-01, -1.2480e-01, -1.3446e-02],
[-1.7946e-01, -3.3014e-01, -2.0310e-01]],
...,
[[ 1.4216e-02, 1.6863e-01, 2.2824e-01],
[-2.9673e-01, -1.3765e-01, 5.6620e-02],
[-3.5889e-01, -1.9878e-01, -8.5354e-02]],
[[ 2.8559e-01, 6.6146e-02, -1.3271e-01],
[ 8.1976e-02, 1.2160e-01, -2.7331e-02],
[-1.5156e-02, 7.0622e-02, 1.7359e-01]],
[[-1.2262e-01, -1.4248e-01, -1.7965e-01],
[-1.3419e-02, -6.3636e-02, -1.8796e-01],
[-7.3223e-02, 2.4836e-04, -1.6976e-01]]],
[[[-6.7415e-02, 4.8130e-02, -2.1682e-02],
[-2.1640e-03, 2.1600e-01, 2.6291e-01],
[ 6.6427e-02, 1.8903e-01, 9.7293e-02]],
[[ 1.5237e-02, -9.6046e-02, -1.4534e-01],
[ 1.2151e-02, 1.8285e-02, -1.8209e-02],
[-7.6615e-02, 1.1647e-01, 7.0116e-02]],
[[-1.2285e-01, 4.8896e-02, -5.4867e-03],
[ 1.1462e-01, -1.0555e-02, -1.6389e-02],
[-2.5203e-02, 1.2808e-01, -4.4010e-02]],
...,
[[-2.5768e-03, 2.7864e-02, 7.4341e-03],
[-9.2527e-02, -6.0377e-02, -5.3693e-02],
[-7.8262e-02, -6.0379e-03, 1.6819e-02]],
[[-6.7338e-02, -2.8003e-01, -2.1308e-01],
[ 5.4706e-02, -7.0885e-02, -1.5605e-01],
[-3.6076e-02, -5.5821e-02, -1.1015e-01]],
[[-2.6017e-01, -4.4031e-02, 5.1768e-02],
[-9.5190e-02, -2.8257e-01, 4.1316e-02],
[-3.8850e-02, -2.1703e-01, -4.5876e-02]]]], device='cuda:0')),
('conv_layers.7.bias',
tensor([-0.0991, -0.1510, -0.0167, 0.0407, -0.0484, -0.1584, -0.0554, 0.0285,
0.1109, 0.1187, 0.0587, 0.1314, -0.0324, -0.1408, 0.2286, -0.1495,
-0.1141, 0.0580, 0.1317, 0.0003, 0.1797, 0.1286, -0.1415, 0.1340,
0.1907, -0.1020, -0.1077, -0.0996, -0.0369, -0.0892, -0.0222, -0.0441],
device='cuda:0')),
('conv_layers.10.weight',
tensor([[[[ 0.0481, 0.1204, 0.2053],
[ 0.0406, 0.1558, -0.0611],
[-0.0402, -0.0274, -0.0100]],
[[-0.0066, 0.0405, 0.1074],
[ 0.0223, 0.0621, 0.1822],
[ 0.0323, 0.2558, 0.2747]],
[[-0.0063, -0.1157, -0.0538],
[-0.0315, -0.0477, -0.0419],
[ 0.0212, -0.0021, -0.0232]],
...,
[[-0.0692, -0.1417, -0.2375],
[-0.0516, 0.0236, -0.0197],
[-0.0740, 0.1130, -0.0402]],
[[-0.0379, -0.0607, -0.0374],
[-0.0474, -0.1074, -0.1290],
[ 0.1088, 0.0439, 0.1216]],
[[-0.0029, -0.0219, 0.0252],
[ 0.0211, -0.0484, -0.0809],
[ 0.0554, -0.0843, -0.0118]]],
[[[ 0.0656, 0.2019, 0.1737],
[ 0.1395, 0.1821, 0.0061],
[-0.0428, -0.0212, -0.0443]],
[[-0.1649, -0.2845, -0.0468],
[-0.2087, -0.2611, 0.1620],
[-0.1767, 0.0541, 0.2049]],
[[-0.0269, 0.0449, 0.0901],
[-0.0495, 0.0028, 0.1001],
[ 0.0034, 0.0683, 0.1126]],
...,
[[ 0.1964, 0.1940, -0.2568],
[ 0.1297, 0.1630, -0.0944],
[-0.0378, -0.2114, -0.2095]],
[[ 0.0353, -0.0787, -0.0739],
[-0.1349, -0.2223, -0.0310],
[ 0.0097, -0.0261, -0.0631]],
[[ 0.0636, 0.0688, -0.0917],
[ 0.1600, -0.1035, -0.2068],
[ 0.0686, 0.0517, -0.0195]]],
[[[ 0.0207, 0.2220, 0.1578],
[-0.0436, -0.0447, -0.0374],
[ 0.0654, -0.1662, 0.0425]],
[[ 0.1893, 0.0876, 0.0378],
[ 0.0855, -0.1127, 0.0070],
[ 0.0539, 0.1291, 0.1273]],
[[-0.0285, 0.0092, 0.0104],
[-0.0637, 0.0316, -0.0065],
[-0.0165, 0.0217, -0.0392]],
...,
[[-0.0827, -0.0973, -0.1233],
[-0.0677, -0.1000, -0.1446],
[ 0.0204, 0.0312, -0.0235]],
[[-0.1716, -0.1873, -0.0296],
[-0.0800, -0.3420, 0.0672],
[-0.2460, -0.1078, 0.1741]],
[[ 0.0357, 0.0265, 0.0268],
[-0.0172, -0.2673, -0.1026],
[ 0.0025, -0.3441, 0.0516]]],
...,
[[[-0.1156, -0.0281, -0.0161],
[-0.0075, 0.0997, 0.0846],
[ 0.1374, -0.0194, -0.1002]],
[[-0.1037, 0.0419, -0.0806],
[ 0.0100, 0.1246, -0.0795],
[-0.1144, -0.1169, -0.0407]],
[[-0.0340, 0.0427, -0.0569],
[-0.0088, -0.0179, -0.0421],
[-0.0524, 0.0641, 0.0608]],
...,
[[-0.1405, -0.2366, -0.1077],
[ 0.0943, -0.1605, 0.1190],
[ 0.0909, -0.0052, 0.1469]],
[[ 0.0328, -0.0240, -0.0863],
[ 0.0342, 0.1642, -0.0971],
[ 0.1159, 0.1937, 0.0197]],
[[ 0.0771, 0.0149, -0.0752],
[ 0.1091, 0.1762, 0.0355],
[ 0.0885, 0.0893, -0.0077]]],
[[[-0.0947, -0.0417, -0.0512],
[-0.0327, 0.0182, -0.0281],
[-0.0281, -0.0515, 0.0520]],
[[ 0.0350, -0.0536, 0.0446],
[ 0.0167, 0.0448, -0.0210],
[-0.0410, -0.0209, -0.0087]],
[[ 0.0036, 0.0265, -0.0190],
[-0.0269, 0.0152, 0.0314],
[-0.0267, -0.0233, -0.0163]],
...,
[[-0.0085, -0.0934, -0.0703],
[-0.0880, -0.0413, -0.1151],
[-0.0431, -0.0411, -0.0625]],
[[-0.0408, 0.0147, -0.0190],
[-0.0563, -0.0544, 0.0315],
[-0.0212, -0.0277, -0.0333]],
[[-0.0243, -0.0382, -0.0307],
[-0.0530, -0.0712, -0.0665],
[-0.0623, 0.0099, 0.0121]]],
[[[ 0.0766, 0.0613, -0.0933],
[ 0.0105, -0.0548, -0.0611],
[ 0.0293, 0.0188, 0.0872]],
[[-0.0064, 0.0437, 0.1223],
[ 0.0401, 0.0294, -0.0322],
[ 0.0142, 0.0253, 0.1579]],
[[-0.0074, 0.0428, -0.0050],
[-0.0168, -0.0209, 0.0392],
[ 0.0061, -0.0389, -0.0983]],
...,
[[-0.0667, -0.1000, 0.0809],
[ 0.1093, 0.0268, 0.0807],
[ 0.0147, 0.0782, 0.0526]],
[[ 0.0168, -0.1438, 0.0363],
[-0.0010, -0.1840, -0.2790],
[ 0.0552, 0.0512, -0.1587]],
[[ 0.0770, 0.0184, -0.1454],
[ 0.0369, -0.1132, -0.1283],
[ 0.1017, -0.0160, -0.0791]]]], device='cuda:0')),
('conv_layers.10.bias',
tensor([-0.1119, 0.0119, 0.2365, 0.0171, 0.2395, -0.1537, -0.0245, -0.0491,
0.1522, -0.0680, -0.0359, -0.0415, 0.1354, -0.0105, 0.0578, 0.0836,
0.1700, -0.0569, -0.0549, -0.1133, 0.1917, 0.0789, 0.1345, -0.0532,
-0.0873, -0.1187, 0.1704, 0.2302, -0.0100, -0.0837, -0.0045, 0.2851],
device='cuda:0')),
('conv_layers.12.weight',
tensor([[[[ 9.1944e-02, 1.0808e-01, 1.1867e-01],
[-3.4763e-02, 6.8680e-02, -8.9628e-02],
[ 1.3088e-03, 1.5386e-02, 6.5064e-02]],
[[ 7.7633e-02, 2.4236e-01, 1.5729e-01],
[ 1.5091e-02, 6.6863e-02, -1.0180e-01],
[-1.9101e-02, -5.8219e-02, -1.0491e-01]],
[[ 3.1198e-02, 6.3995e-02, 7.1379e-02],
[ 1.4187e-01, 1.5022e-01, 1.1578e-01],
[ 2.4211e-01, 1.8364e-01, 7.6487e-02]],
...,
[[-2.9058e-02, 1.9471e-02, 2.0946e-02],
[-2.0406e-01, -7.4794e-02, 1.6158e-01],
[-2.4903e-01, 3.3352e-02, 6.4453e-02]],
[[-4.0293e-02, 3.0463e-02, 2.2810e-02],
[-5.8527e-02, -6.7784e-03, 3.5478e-02],
[ 6.5782e-03, -3.6390e-02, 1.5958e-02]],
[[-1.8908e-02, -1.6014e-01, -2.0454e-01],
[-6.7938e-03, 7.1941e-03, -1.5387e-02],
[ 2.8145e-02, 8.5255e-02, 1.6419e-01]]],
[[[ 3.3087e-02, -1.3390e-01, -4.8519e-02],
[ 1.5817e-01, 8.1536e-02, 2.9806e-02],
[ 1.9279e-01, 6.5759e-02, -1.1705e-01]],
[[-2.4556e-01, -2.1151e-01, -2.5343e-02],
[-1.6861e-01, -1.5875e-01, -2.0723e-03],
[-2.0218e-01, -3.8733e-02, -2.1444e-02]],
[[ 1.1616e-01, 9.9748e-02, 1.4779e-02],
[ 9.5275e-02, 1.3758e-01, 6.4224e-02],
[ 1.0288e-01, 1.5379e-01, 2.4744e-02]],
...,
[[-1.9836e-02, -1.9742e-01, -1.3537e-01],
[-3.4685e-03, -9.8007e-02, 1.2973e-01],
[ 2.0731e-01, -2.8369e-02, 2.3861e-02]],
[[-4.1858e-02, 4.9173e-02, 3.4633e-02],
[-5.0672e-02, -3.3710e-02, 2.3292e-02],
[ 1.2114e-01, 1.7647e-02, -1.2524e-02]],
[[-1.6872e-02, -1.5810e-01, -3.1173e-02],
[ 1.7582e-01, -4.2432e-02, -1.1621e-01],
[ 3.9319e-01, 1.2218e-01, 6.9703e-03]]],
[[[-1.9880e-02, 3.4679e-02, 1.4961e-01],
[ 6.1919e-02, -1.9020e-01, 5.5919e-03],
[ 1.6027e-01, -8.7648e-02, 2.1470e-02]],
[[ 8.2943e-02, 1.1017e-01, 1.9145e-02],
[-1.2612e-01, -7.1319e-02, -9.3768e-02],
[-1.5204e-01, -2.7057e-01, -1.1993e-01]],
[[-3.2576e-01, -2.2225e-01, 7.5513e-02],
[ 6.4291e-02, 8.1786e-02, 9.5710e-02],
[ 9.7440e-02, 1.6212e-01, 1.4752e-01]],
...,
[[-1.1036e-01, -2.4966e-02, 1.8081e-01],
[-9.0625e-02, -1.0447e-01, -6.7964e-02],
[ 5.1896e-03, -6.7810e-02, -1.6604e-01]],
[[-1.1083e-02, -5.4084e-02, -5.9747e-02],
[-1.4803e-04, 5.6023e-02, 1.1207e-02],
[-7.4327e-02, 2.7461e-02, 3.6318e-02]],
[[-3.8139e-02, 1.4915e-02, 2.6560e-01],
[-1.4058e-01, -1.3718e-01, 1.8961e-01],
[ 2.2950e-02, -2.7312e-01, -1.4904e-01]]],
...,
[[[-3.0123e-02, 9.3158e-02, 9.2010e-02],
[ 3.9983e-02, 3.0159e-02, 1.6025e-01],
[ 5.7343e-02, 1.5056e-01, 2.1528e-01]],
[[ 1.3925e-01, 2.1992e-01, 1.5523e-01],
[ 6.3776e-03, 2.9471e-02, 8.8996e-03],
[-1.7368e-01, -2.9937e-02, 6.9208e-03]],
[[-9.6357e-02, -1.4710e-01, -1.7345e-01],
[-5.6495e-02, -7.7490e-02, 6.7950e-02],
[ 6.8310e-03, 1.2345e-01, 2.9295e-01]],
...,
[[ 1.0777e-01, 1.3743e-01, 7.8031e-02],
[ 5.0346e-02, 1.5260e-01, -1.8678e-01],
[-1.3251e-01, 5.1772e-03, -2.2428e-01]],
[[ 8.9095e-02, -2.3177e-02, -1.2835e-02],
[-1.0423e-01, -1.3020e-01, -1.0241e-01],
[-4.7762e-02, -8.5132e-02, -7.4666e-03]],
[[-1.6693e-02, 4.3055e-02, 1.5861e-01],
[ 1.0586e-01, 1.7030e-01, 5.8406e-02],
[ 1.0306e-01, 3.3743e-02, -3.7479e-02]]],
[[[-4.3167e-02, -1.1076e-01, -2.5928e-01],
[-1.3231e-01, -5.1200e-03, -1.0648e-01],
[ 8.4518e-02, 4.4799e-02, -2.3883e-01]],
[[-1.1246e-01, -1.3314e-01, -6.9363e-02],
[ 5.4389e-02, -6.1281e-02, -9.1615e-02],
[-6.3688e-02, -5.1512e-02, -4.7969e-02]],
[[ 9.6538e-02, 4.9973e-02, 1.4273e-01],
[ 1.2617e-01, 7.9390e-02, 2.2097e-01],
[ 1.0241e-01, 1.3580e-01, 8.8713e-02]],
...,
[[-1.8147e-02, -1.3949e-01, -1.5485e-01],
[-9.0324e-02, -9.3568e-02, -1.2304e-01],
[ 1.2296e-01, -3.7381e-02, -2.3267e-01]],
[[ 9.8904e-03, 2.3174e-02, -6.0894e-02],
[-1.7426e-02, -5.4970e-02, 4.3342e-02],
[ 4.1441e-02, -3.2863e-02, 6.2777e-03]],
[[-1.9310e-01, -2.2184e-01, -4.5898e-02],
[-1.5714e-02, -2.9033e-02, -2.7990e-02],
[ 2.0088e-01, 9.2789e-02, -6.0651e-02]]],
[[[ 1.2964e-01, -2.8064e-01, 7.2456e-02],
[ 3.8327e-02, -2.1498e-01, -9.2248e-02],
[-1.1850e-03, -1.8056e-01, -3.7135e-02]],
[[ 1.6106e-02, 1.5868e-02, -1.7955e-01],
[ 1.5805e-02, -1.9910e-01, -1.5124e-01],
[-1.0276e-01, -1.8419e-02, -2.7157e-02]],
[[-4.4209e-02, 5.9149e-02, 8.5935e-02],
[ 1.1679e-02, 8.7827e-02, -2.9267e-02],
[-3.0904e-02, -1.6886e-01, -2.4567e-01]],
...,
[[-1.3123e-01, 1.0625e-01, 1.5836e-01],
[-1.1695e-01, 1.5395e-02, 1.8946e-01],
[-1.9351e-01, -4.3031e-02, 1.4809e-01]],
[[ 1.5032e-02, -4.1201e-02, -1.8327e-02],
[-1.1168e-01, -3.2390e-02, 6.9951e-03],
[-7.2877e-03, -6.6534e-02, -3.0143e-02]],
[[-2.9940e-02, -1.7024e-01, 1.0613e-01],
[-4.7150e-02, -3.6121e-01, -1.4845e-01],
[-3.3219e-02, -2.2471e-01, -5.2009e-02]]]], device='cuda:0')),
('conv_layers.12.bias',
tensor([ 0.0712, -0.0268, -0.0548, -0.0131, 0.1091, -0.0887, 0.0456, 0.0195,
0.1383, 0.1608, 0.0227, -0.0598, 0.2085, 0.0206, -0.0718, 0.1996,
0.1572, 0.0977, -0.0656, -0.1073, -0.0067, -0.2014, 0.0683, -0.0903,
0.0373, 0.0107, -0.0461, -0.1506, -0.0672, 0.0197, 0.0314, 0.1773],
device='cuda:0')),
('fc_layers.0.weight',
tensor([[-0.0686, 0.0701, -0.0212, ..., -0.0799, -0.0131, 0.0399],
[-0.0246, -0.0448, 0.0221, ..., -0.0307, -0.0192, 0.0257],
[ 0.0093, -0.0118, -0.0311, ..., 0.0242, 0.0421, -0.0832],
...,
[-0.0202, -0.0343, -0.0017, ..., 0.0230, -0.0201, -0.0248],
[ 0.0646, 0.0744, -0.0189, ..., -0.1883, -0.1400, 0.0301],
[ 0.0187, 0.0163, -0.0590, ..., 0.0004, -0.0433, 0.0187]],
device='cuda:0')),
('fc_layers.0.bias',
tensor([-4.9034e-02, -2.2165e-02, 2.0435e-02, 5.4468e-02, -4.1511e-02,
-9.1318e-02, -4.6781e-02, -2.0377e-02, 4.1997e-03, 7.3553e-02,
2.7978e-02, -4.0837e-02, -7.8136e-02, -7.1254e-03, -3.7164e-03,
-3.6696e-02, -6.0499e-02, -2.3607e-02, -6.5995e-02, 1.4076e-02,
2.0285e-02, -4.4404e-02, 6.9244e-02, 1.7447e-02, -6.7458e-02,
1.3769e-01, 3.5145e-02, -2.7956e-03, 5.4244e-02, 5.5100e-02,
-7.6556e-03, 4.1739e-02, -6.7150e-02, 3.1529e-02, -9.3607e-02,
5.6558e-02, 1.2195e-01, -2.3150e-02, 5.7107e-03, -4.0337e-02,
2.1916e-02, -1.1833e-02, 9.2158e-02, -3.2596e-02, -6.0222e-02,
2.8538e-02, -1.0957e-01, -4.8336e-02, 8.9367e-03, -2.7919e-02,
-8.7461e-03, 8.3756e-02, 1.9671e-02, -3.4278e-03, 3.3521e-02,
-5.2874e-03, 2.3123e-01, 5.7915e-02, -1.1251e-01, -5.5730e-02,
-2.4747e-03, -2.4138e-03, -1.3832e-02, -3.1881e-02, -5.2582e-02,
7.3469e-02, 4.6344e-02, 9.1696e-02, -6.5384e-02, 2.8454e-02,
-3.7425e-02, 1.4932e-01, -6.6912e-02, -4.4409e-02, 6.8211e-02,
4.6241e-03, 3.1319e-02, -2.1107e-02, 6.0035e-03, 1.4964e-01,
-6.7831e-02, 1.4745e-02, 5.2527e-02, -6.9994e-02, 2.1821e-01,
-5.8981e-02, 2.3727e-02, 1.7595e-01, -6.3514e-02, -1.4614e-02,
-4.9377e-02, -5.7634e-02, 2.1368e-02, 8.1268e-03, -8.0793e-03,
1.0691e-01, 8.8387e-02, 1.6837e-01, 4.4735e-03, 6.0464e-03,
-8.0092e-02, 9.9142e-02, -6.0419e-02, -4.4261e-02, -2.5257e-03,
-3.6975e-02, 8.8504e-03, 1.8843e-02, -7.8382e-02, -8.0277e-02,
-1.5430e-04, 3.0245e-02, 2.0992e-02, -1.1724e-02, 1.4439e-02,
-2.1216e-02, -7.6793e-02, -2.1511e-02, -2.3928e-02, 2.0462e-02,
1.1922e-02, 1.2394e-01, -1.1283e-01, 1.1648e-01, 1.0198e-02,
1.9902e-02, 1.6933e-02, 1.6071e-01, -1.0314e-01, -9.2955e-02,
6.2739e-02, 8.0704e-03, -5.4295e-02, -7.0571e-02, -4.5403e-02,
-2.1017e-02, 6.2194e-02, -3.3825e-02, -7.7058e-02, -6.9817e-02,
4.9792e-02, -4.9608e-02, -4.7104e-02, 1.2334e-01, -1.1349e-02,
1.3979e-02, -3.5555e-03, 3.5426e-02, -1.2178e-02, 3.7618e-02,
2.0964e-02, -7.1856e-02, 1.0336e-01, 9.3889e-02, -1.0374e-01,
2.0182e-02, 7.8103e-02, -6.9763e-02, -2.3448e-02, -4.1621e-02,
3.3920e-02, -6.5924e-02, 1.0003e-02, -3.5212e-02, -7.2546e-02,
-2.9059e-02, -6.0618e-02, -1.1339e-02, -6.2499e-03, 1.1030e-01,
1.3663e-02, 2.5910e-02, 1.7901e-01, -5.8559e-02, 6.5660e-02,
-6.4674e-02, 8.1512e-02, 8.8267e-03, -5.2625e-02, 4.4193e-02,
-4.1416e-02, -5.5969e-02, 9.0321e-02, -1.1337e-01, -7.7335e-02,
-3.5951e-03, 1.2902e-01, 8.5955e-02, -3.6506e-02, -3.0237e-02,
1.0391e-01, 8.9485e-02, -8.9305e-02, -9.0084e-04, 4.0186e-02,
-7.9059e-02, -3.3435e-02, -6.7185e-02, -7.0585e-02, -4.8148e-02,
-6.1638e-02, -9.2540e-02, -6.5787e-03, 2.4978e-02, 5.1069e-02,
-6.7132e-02, -2.1348e-02, 2.6420e-01, -8.6522e-03, 1.0338e-01,
-4.7316e-02, 1.0545e-01, 5.2854e-02, 2.6282e-01, -4.9867e-02,
-5.2649e-02, 1.8472e-02, 6.9170e-03, 2.2844e-02, 1.1688e-01,
-5.1289e-02, 8.3716e-02, 2.5281e-03, -4.7895e-02, -1.0412e-02,
-5.6211e-02, 1.3854e-02, -7.7362e-02, -4.0217e-02, -3.7801e-02,
-6.8361e-02, 7.1031e-02, 2.5958e-02, 1.4184e-01, -4.7251e-02,
-3.9939e-02, -7.9589e-02, -6.1588e-02, -5.5557e-02, -6.3851e-03,
7.2374e-02, 8.0542e-04, -1.4829e-02, 1.1571e-01, -7.5016e-02,
-3.5892e-02, -4.6859e-02, 2.6491e-02, 1.4640e-02, 4.1933e-03,
2.1681e-02, -7.0140e-02, 1.6401e-02, -4.9298e-02, 1.4954e-01,
2.0415e-02], device='cuda:0')),
('fc_layers.2.weight',
tensor([[-0.0449, 0.0260, -0.0316, ..., -0.0176, -0.1216, 0.0076],
[-0.0485, -0.0489, 0.0348, ..., 0.0226, 0.0153, -0.0379],
[ 0.0194, -0.0191, 0.0004, ..., -0.0431, 0.0335, -0.0115],
...,
[ 0.0068, -0.0592, -0.0278, ..., -0.0028, -0.0194, 0.0109],
[-0.0839, -0.0813, -0.0541, ..., -0.0021, 0.0186, 0.0123],
[-0.0863, 0.0201, -0.0204, ..., -0.0358, -0.0292, -0.0287]],
device='cuda:0')),
('fc_layers.2.bias',
tensor([-0.0751, 0.0729, 0.1204, 0.1658, 0.0914, -0.0336, -0.0429, -0.1426,
0.0468, -0.1180], device='cuda:0'))])},
{'ratio': 0.75,
'bias': 0,
'train_losses': [265.9616504062949,
187.52786583010024,
158.62524788770793,
145.57670230994466,
139.10341248129467,
133.75035625716987,
130.60026239974337,
127.90388610059262,
124.44686316153974,
124.31734450325291,
123.05119518220529,
120.74753241243579,
120.28721072151608,
118.13769535213241,
116.78785946294275],
'test_losses': [215.84342265596578,
162.74670712153116,
149.38934069988775,
137.1275212203755,
132.1765222409192,
127.51961567822624,
127.5222620309568,
122.73819152981628,
122.57403757057938,
118.64180428374047,
119.86733638539033,
116.83466278805452,
113.76798366097843,
113.27122707460441,
115.0821596173679],
'model_state_dict': OrderedDict([('conv_layers.0.weight',
tensor([[[[-2.7637e-02, -2.1915e-02, -9.0535e-02],
[-2.5672e-02, 1.2575e-01, -1.2472e-01],
[ 1.4642e-02, 6.9527e-02, -1.0552e-01]],
[[-1.7223e-01, 1.6461e-01, 9.0934e-02],
[-1.0741e-01, 1.4535e-01, 2.5212e-01],
[ 1.5408e-01, 1.1257e-01, 2.8690e-02]],
[[-2.3188e-01, -2.0850e-01, -1.7977e-02],
[-5.6799e-02, 9.0390e-03, 1.3146e-01],
[-5.0579e-02, -3.4658e-02, 1.7483e-02]]],
[[[-8.7385e-02, -1.3263e-01, -2.0599e-02],
[-2.2390e-02, 1.3561e-01, -1.5278e-01],
[ 7.2611e-02, 1.5909e-01, -9.7131e-02]],
[[-1.0606e-01, -1.1590e-02, -1.8301e-01],
[-4.5395e-02, -5.3533e-02, 4.5905e-02],
[-1.0397e-01, -9.8407e-02, 1.8894e-01]],
[[ 9.9178e-02, -4.6087e-03, -1.6187e-02],
[ 1.8793e-01, 1.6146e-01, -6.0880e-02],
[-1.8535e-01, 1.9497e-01, 1.3273e-01]]],
[[[ 1.5745e-01, -1.1187e-01, -4.2963e-02],
[-9.3365e-02, -1.8127e-01, 1.8500e-02],
[-7.5950e-02, 1.1259e-02, 1.8906e-01]],
[[ 3.3809e-02, 6.7635e-02, 7.0239e-02],
[-2.6181e-01, 7.4913e-03, -2.2853e-01],
[-6.1703e-02, -3.8602e-03, 1.2464e-01]],
[[-6.3211e-03, -6.5740e-02, 3.8614e-02],
[ 4.1369e-02, 1.7841e-02, -2.1335e-01],
[ 2.0212e-03, -1.0264e-01, 1.2341e-01]]],
[[[ 1.9926e-01, 2.4657e-01, -1.7144e-03],
[ 7.7309e-02, -1.8795e-01, 1.0262e-01],
[-2.6747e-01, -1.6900e-01, -9.2230e-03]],
[[ 2.0185e-01, 8.4535e-02, 2.1605e-01],
[-8.0699e-03, -1.4422e-01, -8.1130e-02],
[ 1.7088e-02, -2.2709e-01, -1.1559e-01]],
[[ 1.9660e-01, 6.0909e-02, 1.2011e-02],
[-1.7751e-01, 1.1125e-01, -7.3379e-02],
[-4.1693e-02, -1.4509e-01, 1.1245e-01]]],
[[[ 2.8607e-01, 2.2668e-02, 7.5191e-02],
[-6.5428e-02, 2.3313e-02, 1.3445e-02],
[-7.7891e-05, -1.4696e-01, -9.8016e-02]],
[[ 1.5737e-01, 1.2582e-01, -9.6541e-02],
[-1.7664e-01, -1.0032e-01, -2.5881e-01],
[-1.1715e-01, -1.6405e-01, -1.7058e-01]],
[[ 2.5563e-01, 4.1376e-02, -1.1144e-01],
[-3.2183e-04, -1.1867e-01, 1.4910e-01],
[ 1.7313e-01, 3.2551e-03, 6.0894e-02]]],
[[[ 1.4405e-01, 9.9109e-03, -1.8162e-01],
[ 7.2622e-02, 7.2834e-02, -6.4111e-02],
[ 6.4325e-02, 2.2263e-02, -1.4351e-01]],
[[ 9.3562e-02, -8.5227e-02, -1.9743e-01],
[ 3.5432e-01, 1.8055e-01, -2.6389e-01],
[ 2.0641e-01, 1.0924e-02, -2.4712e-01]],
[[-2.2521e-02, 4.7781e-03, -1.4531e-01],
[ 1.7592e-01, 3.4675e-02, 1.6069e-02],
[ 1.3058e-01, -1.8716e-01, -2.5055e-02]]],
[[[-2.2620e-02, 4.0968e-02, -4.4756e-02],
[-1.3840e-01, 1.2364e-01, 1.3328e-01],
[-7.0635e-02, -5.0650e-02, 4.8021e-03]],
[[ 1.5012e-01, 9.5991e-02, 2.9518e-01],
[-2.0730e-01, -1.8957e-02, 7.0599e-03],
[-1.8758e-01, -1.6917e-01, 3.4836e-02]],
[[-2.3460e-02, 2.4488e-01, 4.6984e-02],
[-1.0174e-01, 1.2915e-02, 9.3938e-02],
[-1.7537e-01, -1.3908e-01, -2.4091e-02]]],
[[[-1.5171e-01, -8.0670e-02, 2.0023e-01],
[-6.4966e-02, 4.2315e-02, 1.5977e-01],
[-1.3411e-01, -5.8093e-02, 9.3999e-02]],
[[-2.1220e-01, -2.4435e-01, -9.2980e-02],
[ 6.9721e-02, 1.4500e-01, 2.8107e-01],
[-1.3607e-01, 1.3666e-01, 3.7046e-02]],
[[-1.6582e-01, -1.3142e-01, 1.2737e-01],
[-7.1647e-02, 1.2816e-01, 1.7257e-01],
[-9.9901e-02, 2.0356e-02, 2.4750e-02]]]], device='cuda:0')),
('conv_layers.0.bias',
tensor([ 0.0816, 0.0589, -0.1745, 0.2043, -0.2844, 0.2309, 0.1698, 0.2315],
device='cuda:0')),
('conv_layers.2.weight',
tensor([[[[-8.1593e-03, -5.6227e-02, -5.5308e-02],
[-2.5507e-02, -1.0241e-01, 6.6715e-02],
[-6.4239e-02, 5.0054e-02, 1.8783e-01]],
[[-1.4694e-02, -6.8184e-02, 8.0336e-02],
[-2.7655e-02, 1.0177e-01, 1.1705e-01],
[ 6.9759e-02, 1.4315e-01, 1.3663e-01]],
[[ 1.7167e-01, -2.0683e-02, 1.3507e-02],
[-8.3127e-03, 5.3909e-03, 8.6909e-02],
[ 8.4577e-02, -1.1448e-01, -2.0447e-01]],
...,
[[ 3.2503e-01, -6.7911e-03, -2.6296e-01],
[ 1.8566e-01, -1.7073e-01, -2.6532e-01],
[ 1.6256e-01, -7.1500e-02, -3.1695e-01]],
[[-2.5281e-01, -5.9809e-02, -8.5346e-02],
[-8.9183e-02, -4.6715e-04, 1.1174e-01],
[-1.2716e-01, 3.6993e-03, 1.3979e-01]],
[[-2.7163e-01, 1.7453e-02, 1.1015e-01],
[-1.9273e-01, 2.1123e-02, -3.4724e-02],
[ 6.2789e-02, 1.2534e-01, 1.9093e-01]]],
[[[-9.8161e-02, -1.6443e-02, 1.7728e-01],
[-1.0662e-01, 8.3737e-02, 1.6791e-02],
[-1.0011e-01, -8.5904e-02, 4.1125e-02]],
[[ 8.1313e-02, -7.6703e-02, 8.0319e-02],
[-2.5409e-02, 5.4068e-02, 2.5005e-02],
[ 3.0389e-02, 1.1262e-01, -7.0898e-02]],
[[ 1.6533e-01, 1.0049e-01, -3.9815e-02],
[ 9.6903e-02, 5.2271e-02, -8.5288e-02],
[ 1.1891e-01, 2.0096e-02, -6.4987e-02]],
...,
[[-1.0125e-01, -2.0313e-01, -2.0409e-01],
[-2.1829e-01, -2.4626e-01, -2.0008e-01],
[-1.3456e-01, -1.8145e-01, -9.1671e-02]],
[[-2.0979e-03, 1.1879e-01, 9.4803e-02],
[ 1.2636e-01, -7.3261e-02, 1.7048e-02],
[ 1.1877e-01, -4.9498e-05, 2.4337e-01]],
[[ 4.0588e-02, 3.1654e-02, 1.4019e-01],
[ 1.1617e-01, 2.9251e-02, 1.1660e-01],
[-6.5108e-03, 7.4099e-02, 3.0567e-02]]],
[[[-8.2979e-02, 1.1436e-02, -8.9134e-02],
[-3.9233e-02, -1.4921e-01, -5.0064e-02],
[-1.3934e-01, -1.0158e-01, 3.6315e-03]],
[[ 2.4558e-02, 5.7414e-02, 8.4219e-02],
[-6.5721e-02, 8.5823e-02, -5.0218e-02],
[ 3.5177e-02, 2.9976e-02, -5.5627e-02]],
[[ 4.6587e-02, -1.2003e-01, -6.9328e-02],
[-3.2028e-02, 3.0478e-02, -1.9101e-02],
[ 2.1010e-01, 7.6425e-02, 1.7905e-02]],
...,
[[ 8.4567e-02, 1.9594e-02, -4.6596e-02],
[ 1.5400e-02, 3.0734e-02, -1.0486e-01],
[ 4.8280e-03, -1.6168e-01, -1.8012e-01]],
[[-1.0918e-01, 9.3625e-02, 3.1595e-02],
[-2.0704e-01, 9.8651e-02, 9.2090e-02],
[-1.0649e-02, 6.5540e-02, 2.1941e-01]],
[[-1.3408e-01, 1.1787e-01, 4.0431e-02],
[-1.2948e-01, 5.5286e-02, 4.1915e-02],
[-1.5683e-01, 1.8682e-02, -2.1536e-02]]],
...,
[[[ 2.0497e-02, 7.9528e-02, 9.0001e-02],
[-7.7354e-03, 8.3344e-02, 7.3526e-02],
[-1.7176e-01, -1.0178e-01, -2.3262e-03]],
[[ 6.5389e-02, 5.4414e-02, 1.7603e-03],
[ 7.3522e-02, -3.3432e-02, 2.7914e-02],
[-1.2880e-02, 1.1314e-01, -4.8656e-02]],
[[ 2.7814e-02, -9.8098e-02, -8.1949e-02],
[-1.7459e-02, 1.2885e-01, 1.5998e-02],
[ 9.8218e-02, 1.5785e-01, -6.1283e-04]],
...,
[[ 1.4246e-01, 1.2386e-01, -6.2295e-02],
[ 2.6563e-03, 1.3454e-02, -2.2060e-01],
[-3.1258e-02, -2.8723e-02, -5.8912e-02]],
[[-8.0924e-02, 1.0415e-01, 6.4356e-02],
[-1.2633e-01, -1.2273e-01, 1.1413e-01],
[ 2.7654e-02, 4.3104e-02, 5.6304e-04]],
[[ 9.0776e-03, -4.0888e-02, -1.0066e-01],
[-2.9333e-01, -6.9229e-02, -1.5118e-01],
[-2.2358e-01, -8.3238e-03, 7.8065e-02]]],
[[[ 5.9848e-02, 6.7392e-02, 2.5622e-02],
[-9.4634e-02, 2.7586e-02, 1.1496e-01],
[-9.3213e-02, -9.3142e-02, 7.1565e-02]],
[[ 1.1615e-01, 5.3967e-02, -2.5468e-02],
[ 3.1489e-02, -4.8127e-02, 6.6042e-02],
[ 6.1823e-02, -4.6024e-02, -4.6753e-02]],
[[ 5.1570e-02, 9.4117e-02, -9.4735e-02],
[ 1.7666e-01, -1.4015e-02, -7.4255e-02],
[ 4.4628e-02, -4.8274e-02, 5.9745e-02]],
...,
[[-1.4037e-01, -1.5989e-01, -9.6803e-02],
[-4.9868e-05, -1.3788e-01, -1.0456e-01],
[-2.3229e-01, -1.1985e-01, -1.6667e-01]],
[[ 1.7060e-01, 7.6581e-02, -1.0678e-02],
[ 4.9440e-02, -5.0962e-05, 1.4052e-02],
[-2.1519e-02, -7.9317e-02, 5.0484e-02]],
[[ 2.2854e-02, 1.3163e-01, 1.3781e-01],
[ 1.1517e-01, 4.8226e-02, 5.6083e-02],
[ 3.5254e-02, 5.1451e-02, -2.6926e-02]]],
[[[ 4.1171e-03, -2.6993e-02, -8.6922e-02],
[-1.8215e-03, 1.3082e-02, -1.9468e-01],
[ 1.6851e-01, 9.5530e-02, -6.3545e-02]],
[[ 6.2081e-02, -2.7727e-02, 1.0447e-01],
[ 2.4251e-02, 9.0806e-02, 1.6821e-01],
[ 2.7426e-02, 3.9170e-02, 6.2352e-02]],
[[ 9.2140e-02, 1.5482e-02, 1.1201e-01],
[ 5.8399e-02, 4.7607e-02, 1.2606e-02],
[ 5.2917e-02, 1.0986e-02, -5.2852e-02]],
...,
[[ 1.4192e-01, 3.5182e-02, 1.9005e-02],
[ 1.0531e-01, -1.2893e-01, -2.2007e-02],
[-5.0932e-02, 1.4747e-02, -6.2369e-02]],
[[-1.2607e-01, 4.1555e-02, 1.6431e-02],
[ 7.0984e-02, -7.6817e-02, 1.0529e-01],
[-2.2684e-01, -2.1964e-01, -2.1830e-01]],
[[-3.3705e-02, -8.9410e-02, 8.7310e-02],
[ 7.2323e-03, 8.3037e-03, -6.3219e-02],
[ 2.8684e-02, 9.9215e-04, 6.7304e-02]]]], device='cuda:0')),
('conv_layers.2.bias',
tensor([ 0.2417, -0.0467, -0.1161, 0.1530, -0.2770, -0.0864, 0.0075, -0.1105,
-0.2600, -0.0570, -0.3229, 0.0528, 0.0783, 0.0300, 0.0672, 0.3363],
device='cuda:0')),
('conv_layers.5.weight',
tensor([[[[-1.5625e-01, -3.4179e-01, 6.3985e-02],
[ 5.8865e-02, -2.2641e-01, 1.3087e-01],
[ 1.3086e-01, -1.3669e-01, -1.0751e-01]],
[[-8.2053e-02, -1.9760e-01, -2.1047e-01],
[ 5.5381e-02, 1.0314e-01, -8.7982e-02],
[ 9.1699e-02, 7.6878e-02, -5.2971e-03]],
[[-2.5460e-02, -1.8538e-01, -1.5831e-01],
[ 1.6444e-01, -1.1438e-01, -1.3343e-01],
[ 1.0798e-01, 2.4268e-02, 4.1451e-02]],
...,
[[-1.7245e-02, -7.8327e-02, -1.0320e-02],
[ 6.0000e-02, -5.6586e-02, -2.0009e-03],
[ 7.0566e-02, -5.3059e-02, -5.4548e-02]],
[[-4.8028e-02, -1.1703e-01, -1.3529e-01],
[ 1.3211e-01, 1.6856e-02, -7.8464e-02],
[ 1.2626e-01, 1.6436e-01, 3.0203e-03]],
[[-1.6721e-01, -1.1006e-01, 9.2291e-02],
[-4.8203e-02, -1.4354e-03, 7.1111e-02],
[ 1.2237e-01, 7.7278e-03, 7.2452e-02]]],
[[[ 2.0100e-02, 1.4140e-02, 1.3444e-01],
[-9.1555e-02, -6.2168e-02, -5.2806e-02],
[-1.2705e-01, 1.5497e-03, 1.2660e-02]],
[[ 8.2906e-03, 4.7900e-02, 4.8029e-02],
[ 7.9288e-02, 3.3335e-02, 5.6497e-02],
[-5.3372e-02, 5.1914e-02, 3.4410e-02]],
[[ 2.2276e-02, -1.1269e-01, -2.1221e-01],
[-1.1649e-03, 1.6706e-02, 3.9085e-02],
[ 2.0802e-02, 1.1775e-01, 1.1261e-01]],
...,
[[ 1.0827e-01, -1.6445e-02, -1.3314e-01],
[ 3.9418e-02, -1.4950e-01, -1.5729e-01],
[ 1.5841e-01, 1.6271e-01, 1.1007e-01]],
[[-3.1605e-02, -2.0438e-02, 2.7378e-02],
[-4.9244e-02, 7.8551e-02, 3.4862e-02],
[ 9.0980e-02, 1.8267e-01, 7.3085e-02]],
[[ 1.4032e-01, 1.5703e-01, 6.5131e-02],
[-1.2660e-01, -1.2349e-01, -3.7201e-02],
[-1.2013e-01, -1.1235e-01, -8.2807e-02]]],
[[[-6.3076e-03, 1.7945e-01, 1.8201e-01],
[ 2.9084e-02, 1.1584e-01, 2.0270e-01],
[ 2.2971e-01, 2.0084e-01, 3.9481e-02]],
[[-1.2498e-01, 2.6537e-02, 1.2843e-01],
[-7.8348e-02, -1.1612e-01, 1.4058e-01],
[ 4.1807e-02, -1.0295e-01, -4.5972e-02]],
[[-1.0694e-01, -7.7395e-02, -9.7010e-02],
[ 5.7698e-03, -5.9459e-02, 3.7163e-02],
[ 3.9202e-02, 3.5519e-02, -1.0576e-01]],
...,
[[-8.0514e-02, 2.7030e-02, -2.0252e-01],
[ 4.0971e-02, 1.0664e-01, -1.4161e-02],
[ 9.4371e-02, -1.0917e-01, -1.5821e-01]],
[[-9.9960e-02, 2.3349e-02, 1.1293e-03],
[-1.1916e-02, -8.2494e-02, 1.6323e-01],
[-3.2862e-02, 5.4551e-02, -5.3208e-02]],
[[ 5.4828e-02, 1.6939e-01, -5.6777e-04],
[ 1.0730e-02, 3.8002e-02, 3.8288e-02],
[ 4.4261e-02, 1.6596e-01, 5.0483e-02]]],
...,
[[[-2.2832e-01, 1.7061e-03, 5.2144e-03],
[-6.3033e-03, 2.2562e-01, 1.2690e-01],
[ 1.2419e-01, 7.7761e-02, -5.9719e-02]],
[[ 2.0562e-02, 1.0136e-01, -1.7548e-03],
[-1.2352e-01, 1.7719e-01, 4.8523e-02],
[ 1.8975e-02, 1.6328e-01, 4.0154e-02]],
[[-1.1393e-02, -1.6492e-02, -8.0676e-02],
[-1.0171e-02, 9.7753e-02, -1.5061e-01],
[-3.0539e-02, 4.2745e-02, -2.0815e-02]],
...,
[[ 9.4202e-02, 1.1881e-01, 2.6569e-02],
[ 1.3434e-01, -1.0954e-02, -1.9733e-01],
[ 4.1344e-02, -8.4690e-02, -1.2505e-01]],
[[-1.9790e-01, 7.2959e-03, 1.2676e-01],
[-7.7461e-02, 1.3812e-01, 1.8973e-01],
[-1.2344e-01, 7.7081e-02, 1.3909e-01]],
[[ 3.0363e-03, -6.7439e-02, -3.8704e-02],
[ 6.4704e-02, 3.0354e-02, -3.6903e-02],
[ 5.0878e-02, 6.4413e-02, 3.8172e-03]]],
[[[-7.1485e-02, -7.5512e-02, -1.0071e-01],
[ 3.5543e-02, 8.1360e-02, -2.0332e-02],
[-1.5678e-02, -4.2029e-02, -1.1447e-01]],
[[ 3.0297e-02, -3.6608e-02, 4.6323e-02],
[-1.3634e-02, -5.9503e-02, -2.0974e-01],
[ 2.3006e-04, -2.5990e-02, -1.5999e-01]],
[[-1.4564e-01, -1.8967e-02, 5.3739e-02],
[-9.6773e-02, -1.6036e-01, -8.5620e-02],
[ 5.9607e-02, 6.6281e-02, -5.0737e-03]],
...,
[[ 3.2467e-02, -4.1829e-02, 8.6170e-02],
[ 2.8028e-02, -3.0544e-02, 5.3125e-02],
[ 2.2344e-01, 2.8471e-02, -9.5982e-02]],
[[-9.4262e-02, -2.1732e-01, 4.3373e-02],
[-2.8319e-02, -2.0306e-01, 2.1648e-02],
[-1.4300e-02, -3.7464e-02, 4.1016e-02]],
[[ 9.1160e-02, 8.2053e-02, -1.4632e-02],
[ 2.4199e-02, 1.6367e-01, 3.1900e-02],
[ 3.5402e-02, 1.3859e-01, -5.1246e-03]]],
[[[-2.1073e-01, -1.3571e-01, 9.5713e-02],
[-1.1079e-01, -1.1425e-01, 1.0809e-01],
[ 1.3711e-01, 1.1238e-01, 1.7721e-01]],
[[-1.8841e-02, -1.8800e-01, -1.2410e-01],
[-1.0417e-01, -6.8528e-02, -3.7988e-02],
[ 8.2367e-02, 7.9717e-02, 1.7094e-02]],
[[-9.3372e-02, -1.5133e-01, -1.2632e-01],
[ 6.0741e-02, -3.1249e-02, 1.4319e-01],
[ 5.1525e-02, -3.3835e-02, 1.0752e-02]],
...,
[[ 4.8502e-02, 1.7652e-02, 3.1374e-02],
[ 8.6166e-02, 1.1256e-02, 6.3715e-02],
[-5.6870e-02, -2.2052e-01, -1.7317e-01]],
[[ 3.1822e-03, -9.8676e-02, -7.0382e-02],
[-5.2979e-02, -9.1599e-02, 5.6219e-02],
[-1.4287e-01, 1.0958e-02, 1.0046e-01]],
[[-1.0358e-01, -1.0093e-01, 8.0135e-02],
[ 3.9357e-02, 7.1449e-02, -1.2567e-03],
[ 2.1833e-01, 1.2254e-01, 1.7356e-01]]]], device='cuda:0')),
('conv_layers.5.bias',
tensor([ 0.0381, -0.0281, 0.2541, -0.0129, 0.0948, 0.1356, 0.0999, 0.1311,
-0.0728, 0.0616, 0.0216, 0.2715, 0.1760, 0.0603, -0.1015, 0.1933,
0.2028, 0.1559, -0.0412, 0.0518, -0.0102, 0.2303, 0.1200, -0.0781,
-0.0455, 0.2798, -0.0751, 0.0783, 0.0182, -0.0351, 0.0838, 0.2360],
device='cuda:0')),
('conv_layers.7.weight',
tensor([[[[ 6.4285e-02, -2.1297e-02, -3.3127e-01],
[ 1.7758e-01, -2.1588e-02, -4.7354e-01],
[ 1.3351e-01, 9.2771e-02, -2.9052e-01]],
[[ 6.6281e-02, 8.7619e-02, 1.3267e-01],
[-1.6999e-02, -8.7038e-02, -5.0484e-02],
[-5.0458e-02, -1.2341e-01, -2.5169e-01]],
[[-1.3784e-01, 1.2247e-01, 6.8477e-03],
[-7.1918e-02, 8.5637e-02, -9.6068e-02],
[ 5.2404e-03, 2.4336e-02, 4.7432e-02]],
...,
[[-1.6711e-01, -3.7336e-02, 1.0592e-01],
[ 8.8235e-03, 5.7418e-02, 6.0777e-02],
[-2.9712e-02, -7.7883e-02, -1.2368e-02]],
[[-1.9615e-01, -1.8914e-04, 7.4547e-02],
[-2.7886e-01, -5.9133e-02, 2.3332e-02],
[-3.5247e-02, -5.5392e-03, 6.9794e-02]],
[[-1.2875e-01, -1.8207e-01, -1.9580e-01],
[ 2.3647e-03, 6.6804e-02, 3.9096e-02],
[ 5.3714e-02, 3.8149e-02, 5.4114e-02]]],
[[[-9.0512e-02, -3.0878e-02, 1.8295e-02],
[-1.6760e-02, 1.2866e-02, -9.7852e-02],
[-2.5107e-02, -2.8449e-02, -2.4650e-02]],
[[ 3.9876e-02, -7.9702e-02, -5.1494e-02],
[ 3.1836e-03, -4.4121e-02, -2.7710e-03],
[-5.2952e-02, 2.7526e-02, -1.0215e-01]],
[[-1.5064e-02, 3.2348e-02, -1.0417e-02],
[-9.2459e-02, -1.1748e-01, -2.4002e-02],
[-1.4836e-02, -1.0441e-01, -8.0939e-02]],
...,
[[ 1.8636e-02, -1.8370e-02, -5.1853e-02],
[-9.7775e-03, -2.5277e-02, -7.4832e-02],
[-1.1898e-02, -1.2342e-01, -1.1089e-01]],
[[-1.5615e-02, -5.2613e-02, -9.6078e-02],
[-1.5480e-02, -1.4484e-02, -3.3243e-02],
[ 1.5744e-02, 2.7809e-02, -2.4305e-03]],
[[-6.3409e-02, 2.6552e-03, -4.9794e-02],
[-6.6308e-02, 1.0230e-02, -5.4207e-02],
[ 1.9217e-02, -5.3366e-02, -1.8989e-02]]],
[[[-2.5360e-02, -8.5779e-02, -5.1933e-02],
[ 1.6698e-01, 7.7404e-02, -1.9420e-01],
[ 7.1703e-02, 1.0885e-01, -1.5928e-01]],
[[ 3.8783e-02, -9.6206e-02, -6.6461e-02],
[ 1.4885e-01, 4.4821e-02, -6.8838e-02],
[-1.2775e-01, -2.3984e-01, -2.1439e-01]],
[[ 1.7698e-02, -5.6262e-02, -9.7210e-02],
[-4.5633e-02, -8.6676e-02, -1.0685e-01],
[ 9.8960e-03, 5.0884e-02, -2.1917e-02]],
...,
[[ 4.6507e-02, -4.9108e-02, -1.6277e-02],
[-6.8465e-02, -1.3580e-01, -1.6104e-01],
[ 4.1917e-02, -5.1927e-02, -1.9583e-01]],
[[ 5.0679e-02, -6.3757e-03, -1.5620e-01],
[ 5.9770e-02, 1.1343e-01, -7.4926e-02],
[-8.4798e-02, -8.2672e-02, -9.9916e-02]],
[[-1.3509e-01, -1.1620e-01, 2.8598e-02],
[-1.8412e-01, -1.8272e-01, -1.1618e-01],
[-3.2917e-02, 3.3090e-02, 3.6547e-02]]],
...,
[[[-2.5472e-01, -1.8454e-01, -1.5949e-01],
[-3.6205e-01, -2.4385e-01, -1.5391e-01],
[-1.9152e-01, -3.9719e-01, -3.0624e-01]],
[[-1.0520e-01, -9.5025e-02, -2.4072e-01],
[ 4.3764e-03, 3.4600e-02, -7.5599e-02],
[-8.8183e-02, -1.2494e-01, -5.9927e-02]],
[[ 1.7416e-01, 8.6497e-02, -4.4989e-02],
[ 8.0670e-02, -6.3212e-02, -1.1576e-01],
[ 4.0498e-02, -9.3471e-02, -5.9021e-02]],
...,
[[-2.4744e-02, 1.1366e-01, -4.6140e-01],
[-1.4101e-01, 1.0300e-03, -4.0793e-01],
[-1.5934e-01, -1.3144e-02, -1.0706e-01]],
[[ 1.3769e-01, -1.3935e-01, -3.7190e-02],
[-1.0517e-02, -2.3465e-01, -1.5328e-04],
[ 2.1928e-02, -2.0567e-01, 1.7175e-02]],
[[ 8.8469e-02, -2.0457e-02, -7.8590e-02],
[ 7.7456e-03, -8.7233e-02, -1.8516e-02],
[ 1.3038e-01, 1.5760e-02, 4.4118e-02]]],
[[[-8.7764e-02, -2.4799e-02, -5.0480e-02],
[ 6.3707e-03, 1.7284e-01, 1.8547e-01],
[-1.2571e-01, -3.7999e-02, 4.3368e-02]],
[[ 1.1759e-01, 5.1863e-02, 4.8163e-03],
[ 1.0097e-01, 1.0917e-01, 5.5343e-02],
[-1.2267e-02, 5.5950e-02, -1.5330e-02]],
[[ 2.9948e-03, 6.0015e-02, -1.8902e-02],
[ 1.0987e-01, 3.9861e-02, -6.4806e-02],
[ 1.2622e-01, 2.7699e-03, 1.5711e-01]],
...,
[[-7.6024e-02, -4.6939e-02, -5.7555e-02],
[-2.9153e-02, 4.3918e-02, -8.1214e-02],
[ 2.0134e-02, -7.2489e-02, -2.9518e-02]],
[[ 4.3367e-02, 7.7731e-02, -8.8675e-02],
[-2.9935e-02, -1.2214e-02, -3.2554e-01],
[-2.1663e-03, -6.4760e-02, -2.2934e-01]],
[[-5.6969e-02, -1.3793e-01, -8.3068e-02],
[ 1.0651e-01, -8.2316e-02, -5.2778e-02],
[ 2.2472e-02, 4.2956e-02, 3.5711e-02]]],
[[[-3.9663e-03, -6.7031e-02, -1.3061e-01],
[-1.1830e-01, -3.0968e-03, -3.4076e-02],
[-5.7186e-02, 5.2091e-02, 1.1799e-01]],
[[-8.9977e-02, -1.1796e-01, 6.8223e-02],
[ 7.1893e-02, 8.3571e-02, -1.7319e-02],
[ 8.0837e-02, 9.7251e-02, 8.4986e-02]],
[[ 1.4496e-01, 1.8847e-01, 4.3245e-02],
[-2.9497e-02, 5.2338e-02, 1.2039e-01],
[-1.2705e-01, -1.2332e-01, 9.9311e-03]],
...,
[[-2.3363e-02, -2.2554e-02, -2.0785e-02],
[ 4.7086e-02, -1.2867e-01, -2.0113e-01],
[ 6.8847e-02, -5.5374e-02, -1.6029e-01]],
[[ 9.1335e-02, 5.8341e-02, 7.5597e-02],
[-3.5967e-02, 1.2496e-01, 9.0136e-02],
[-1.0650e-01, 6.9504e-02, 6.2174e-02]],
[[ 7.5260e-02, 3.9710e-02, 6.6634e-03],
[-1.1085e-02, 7.1865e-03, -2.9745e-02],
[-2.9634e-01, -2.1326e-01, -1.2565e-01]]]], device='cuda:0')),
('conv_layers.7.bias',
tensor([-0.0485, -0.0602, -0.0098, 0.0524, 0.1260, -0.0739, -0.1104, 0.0890,
-0.1328, 0.1379, 0.1541, -0.0177, 0.0890, 0.1112, 0.0846, 0.1145,
-0.0176, 0.1491, 0.1719, -0.0090, 0.0490, -0.0936, -0.0166, 0.1474,
0.0784, -0.0536, -0.1247, 0.1227, 0.1567, 0.1982, 0.1593, -0.0392],
device='cuda:0')),
('conv_layers.10.weight',
tensor([[[[ 0.1095, 0.0086, 0.1146],
[ 0.0762, 0.0712, 0.1066],
[ 0.0471, 0.1334, 0.1207]],
[[-0.0136, 0.0478, 0.0675],
[-0.0184, 0.0625, -0.0263],
[-0.0026, 0.0561, -0.0945]],
[[-0.1919, -0.0229, 0.0919],
[-0.1709, -0.0994, 0.0352],
[-0.2729, -0.1978, 0.0776]],
...,
[[-0.0178, -0.0573, -0.0957],
[ 0.0735, 0.0660, -0.1542],
[ 0.0907, 0.0037, -0.1204]],
[[-0.1340, -0.1850, -0.0309],
[ 0.0299, 0.0008, -0.0523],
[-0.0737, -0.0208, -0.0249]],
[[-0.0440, 0.0521, 0.0678],
[-0.2538, -0.5333, -0.3017],
[-0.1616, -0.3309, -0.1039]]],
[[[-0.0324, 0.0992, 0.1353],
[-0.0425, 0.0926, 0.0565],
[-0.0606, 0.0732, 0.0208]],
[[-0.0410, -0.0513, 0.0182],
[-0.0553, 0.0331, -0.0164],
[ 0.0453, 0.0536, -0.0604]],
[[ 0.0211, 0.1141, 0.2000],
[-0.0457, 0.0989, -0.0738],
[-0.1912, 0.0523, -0.0038]],
...,
[[ 0.1031, -0.0699, -0.2907],
[-0.0220, 0.0710, -0.1245],
[-0.0979, 0.0033, -0.1368]],
[[ 0.0559, 0.1138, 0.1183],
[ 0.0124, 0.1541, 0.0768],
[-0.0882, 0.0812, -0.0510]],
[[ 0.0470, -0.1338, -0.0040],
[-0.0804, -0.1697, -0.0765],
[-0.1597, -0.0783, -0.0211]]],
[[[-0.0797, 0.0147, -0.0909],
[ 0.0609, 0.2656, 0.1285],
[ 0.0265, 0.1407, -0.0359]],
[[-0.0208, -0.0284, -0.1041],
[ 0.0032, -0.1386, -0.0568],
[-0.0238, 0.0401, 0.0377]],
[[ 0.0143, 0.1072, -0.0908],
[-0.0920, 0.1605, 0.0255],
[ 0.0858, 0.0844, -0.1388]],
...,
[[ 0.1854, 0.0064, -0.1341],
[ 0.0092, -0.1104, -0.2021],
[-0.1109, -0.0447, -0.1045]],
[[-0.0850, -0.0293, -0.0302],
[ 0.0695, 0.1474, 0.0539],
[ 0.1117, 0.0613, -0.0689]],
[[-0.1317, 0.0065, -0.1548],
[-0.0281, 0.0181, 0.0046],
[-0.0270, 0.1229, -0.0096]]],
...,
[[[ 0.1260, -0.0598, -0.1716],
[ 0.2748, 0.0010, -0.0734],
[ 0.2769, 0.1017, -0.1855]],
[[ 0.1067, 0.1498, -0.0034],
[-0.0703, 0.0321, -0.0242],
[ 0.0009, 0.1027, 0.0327]],
[[-0.0428, -0.1231, 0.0513],
[-0.2250, -0.3678, -0.1413],
[-0.1901, -0.1123, 0.0221]],
...,
[[ 0.0253, 0.0303, -0.1192],
[-0.1530, -0.1839, -0.2338],
[-0.1329, 0.0228, 0.1491]],
[[-0.2134, -0.1618, -0.1297],
[ 0.0582, 0.0526, 0.0893],
[-0.2991, -0.0307, -0.0502]],
[[ 0.0658, 0.0158, -0.2529],
[-0.0059, 0.0227, -0.0686],
[ 0.0580, -0.0474, -0.0405]]],
[[[-0.0949, -0.1008, 0.0363],
[-0.2221, 0.0006, 0.1162],
[-0.0125, 0.0876, 0.1029]],
[[ 0.0466, -0.0398, -0.0512],
[ 0.0343, 0.0267, -0.0187],
[ 0.0219, 0.0385, 0.0034]],
[[ 0.0177, -0.1167, -0.0666],
[-0.1458, -0.1482, 0.0184],
[ 0.1242, 0.0164, 0.0915]],
...,
[[-0.0196, 0.0303, -0.0824],
[ 0.1236, 0.1413, 0.0032],
[ 0.0665, 0.1336, -0.0440]],
[[-0.0287, -0.0561, 0.0297],
[-0.1187, -0.0733, 0.0429],
[-0.1732, -0.1166, 0.0228]],
[[-0.0675, -0.2796, -0.2695],
[-0.2006, -0.2525, 0.0125],
[-0.0474, 0.0054, 0.0916]]],
[[[ 0.0299, -0.0114, 0.0474],
[-0.0530, -0.0415, 0.1217],
[-0.1898, -0.0658, 0.1470]],
[[-0.0192, 0.0538, 0.0144],
[ 0.0521, 0.0536, -0.0165],
[ 0.0584, 0.0680, 0.0036]],
[[ 0.0735, -0.1033, -0.0235],
[-0.0433, 0.0312, -0.0431],
[ 0.0010, -0.0467, -0.0908]],
...,
[[-0.1769, -0.0138, -0.2372],
[-0.0133, 0.1607, -0.1058],
[ 0.0333, 0.0808, -0.1680]],
[[-0.0684, 0.0235, 0.1103],
[-0.1716, -0.0400, 0.0816],
[-0.0649, -0.1188, 0.0241]],
[[ 0.1419, -0.1488, -0.2809],
[-0.0237, -0.1613, -0.3710],
[-0.0101, -0.0705, -0.1442]]]], device='cuda:0')),
('conv_layers.10.bias',
tensor([ 0.2056, 0.1311, 0.1188, 0.0607, -0.1638, 0.2173, -0.0568, 0.0528,
-0.0193, -0.0865, 0.0913, 0.3029, -0.0428, 0.0987, -0.0328, 0.0643,
-0.0408, -0.0549, 0.0692, 0.0765, 0.0474, -0.0956, 0.1207, -0.0291,
0.0723, 0.0620, -0.0342, 0.0495, 0.1960, 0.0158, 0.2214, 0.1507],
device='cuda:0')),
('conv_layers.12.weight',
tensor([[[[ 0.0988, 0.0900, -0.1657],
[ 0.1249, 0.2157, 0.1566],
[-0.0021, 0.0796, 0.0546]],
[[-0.1168, -0.0623, -0.1572],
[ 0.1581, 0.1211, -0.0187],
[ 0.1695, 0.1318, -0.0981]],
[[-0.0086, 0.0978, -0.0768],
[ 0.0448, 0.1122, 0.0127],
[-0.0539, 0.0875, -0.0027]],
...,
[[-0.0881, -0.0860, -0.0786],
[ 0.0084, -0.0047, -0.0805],
[-0.1387, -0.0449, -0.1675]],
[[-0.0619, -0.0757, -0.1411],
[-0.0829, 0.0133, -0.0144],
[-0.0910, -0.0176, -0.0547]],
[[-0.0856, -0.0624, -0.0869],
[ 0.0938, 0.0310, 0.0323],
[ 0.0822, -0.0642, -0.0105]]],
[[[-0.0336, -0.1591, 0.0101],
[ 0.0920, -0.1918, 0.0486],
[-0.0244, -0.2286, -0.0538]],
[[-0.1974, -0.1591, 0.0734],
[-0.1148, -0.1251, 0.1060],
[-0.0953, -0.2845, 0.0214]],
[[-0.2177, -0.1513, 0.1326],
[-0.0317, -0.1337, -0.0079],
[-0.1160, -0.1376, -0.0665]],
...,
[[-0.0086, 0.0652, 0.0307],
[ 0.0921, 0.0901, 0.0218],
[-0.0552, -0.1093, -0.1348]],
[[-0.0104, -0.1363, -0.1080],
[-0.0079, -0.2141, -0.0114],
[ 0.1424, -0.0476, 0.0668]],
[[-0.0719, 0.0127, -0.0565],
[-0.0552, -0.1353, 0.0146],
[ 0.0171, -0.1156, 0.1151]]],
[[[-0.1763, -0.2946, -0.0899],
[-0.0914, 0.0296, 0.0524],
[ 0.0732, 0.0452, -0.0255]],
[[-0.0528, -0.1613, 0.1306],
[-0.2606, -0.0845, 0.1002],
[ 0.0163, 0.0541, 0.0493]],
[[ 0.1018, 0.0006, 0.0895],
[-0.0798, 0.0072, 0.1209],
[-0.0084, 0.0780, -0.0642]],
...,
[[-0.0900, 0.0151, 0.0589],
[ 0.1019, -0.0509, -0.0394],
[ 0.2253, 0.1305, 0.1691]],
[[ 0.0021, -0.1069, -0.0367],
[-0.0796, 0.0071, 0.0791],
[-0.0389, 0.0348, -0.0753]],
[[-0.2198, -0.1436, 0.0151],
[-0.2740, -0.0547, -0.1199],
[-0.0851, 0.0175, -0.1154]]],
...,
[[[-0.0077, 0.0138, -0.0642],
[ 0.0270, 0.0865, -0.0722],
[-0.0446, 0.0443, -0.0413]],
[[-0.1421, -0.0825, 0.0039],
[-0.0723, -0.0442, -0.0309],
[-0.0360, -0.0443, -0.0353]],
[[ 0.0265, -0.0480, 0.0413],
[ 0.0235, 0.0150, -0.0877],
[ 0.1689, 0.2294, -0.0362]],
...,
[[-0.0763, 0.0334, 0.0864],
[-0.1159, -0.0567, 0.0166],
[-0.1395, -0.0950, -0.1532]],
[[ 0.0666, -0.2372, -0.1720],
[ 0.0958, -0.1422, -0.0627],
[ 0.0318, -0.0239, 0.0416]],
[[-0.0826, -0.0511, -0.1226],
[ 0.0194, -0.0068, 0.0387],
[-0.0250, 0.0043, 0.1545]]],
[[[ 0.0563, 0.1120, 0.0303],
[ 0.0182, 0.0986, 0.0437],
[ 0.0763, 0.0834, 0.0134]],
[[-0.2078, -0.0786, 0.0605],
[ 0.0113, 0.0389, 0.1822],
[ 0.0130, -0.0103, 0.0971]],
[[-0.1703, 0.0028, 0.2105],
[-0.1103, -0.0340, 0.1205],
[-0.0988, 0.0096, 0.1146]],
...,
[[-0.1856, -0.1414, -0.0138],
[-0.0104, -0.0404, 0.0411],
[ 0.1698, 0.0222, 0.0929]],
[[ 0.0041, 0.0035, -0.1047],
[ 0.1471, 0.0278, -0.0040],
[ 0.0453, 0.0111, -0.0230]],
[[-0.2019, -0.1609, -0.2032],
[-0.0601, -0.0633, 0.0190],
[-0.1085, -0.2834, 0.0529]]],
[[[ 0.0814, 0.0529, -0.0540],
[ 0.1244, 0.1420, -0.0147],
[ 0.0569, 0.0385, -0.0090]],
[[-0.1547, -0.1441, 0.0620],
[ 0.0012, 0.0410, 0.1392],
[-0.0058, -0.0071, 0.0687]],
[[-0.0907, -0.0567, -0.0480],
[ 0.0049, 0.0209, -0.0135],
[ 0.1212, -0.0288, -0.0914]],
...,
[[ 0.0056, 0.1118, -0.0355],
[ 0.1202, 0.0694, 0.2739],
[-0.1117, 0.1553, -0.0411]],
[[-0.1102, -0.1595, -0.0004],
[ 0.0578, 0.0355, 0.1079],
[-0.0348, -0.0043, -0.0538]],
[[ 0.0153, 0.0688, 0.1651],
[ 0.0962, 0.0716, 0.0983],
[ 0.0008, 0.1003, -0.0199]]]], device='cuda:0')),
('conv_layers.12.bias',
tensor([ 0.0006, -0.0303, 0.0320, -0.1081, -0.0071, -0.1529, 0.1734, 0.0420,
0.1968, -0.1304, -0.0719, -0.0551, 0.1635, 0.1793, 0.0443, 0.2530,
0.1283, 0.1964, 0.1727, -0.1269, -0.0897, 0.0469, -0.0046, 0.0933,
-0.1245, 0.1475, 0.0912, 0.2937, 0.1860, 0.0797, 0.0665, 0.2674],
device='cuda:0')),
('fc_layers.0.weight',
tensor([[ 0.1081, 0.1272, -0.1303, ..., -0.0343, 0.0055, -0.0022],
[-0.1223, 0.0311, 0.0155, ..., -0.0294, 0.0743, 0.0999],
[-0.0515, -0.0503, -0.0509, ..., -0.0261, -0.0656, -0.1224],
...,
[-0.0529, 0.0118, -0.0320, ..., -0.0090, -0.0355, -0.0301],
[-0.0054, -0.0140, -0.0163, ..., 0.0363, -0.0207, -0.0266],
[-0.0596, -0.0092, -0.0546, ..., -0.0704, -0.0134, 0.0095]],
device='cuda:0')),
('fc_layers.0.bias',
tensor([ 1.1668e-01, 5.9697e-03, -5.6385e-02, 1.9065e-01, 8.7989e-03,
-2.4191e-02, 2.7172e-01, 1.3433e-01, -7.5573e-02, -5.0088e-02,
-1.2477e-02, 5.7212e-03, -1.2797e-02, -8.6549e-02, -6.5557e-02,
-1.2155e-02, -2.1454e-02, -7.0707e-03, -1.7266e-02, -4.0555e-02,
-9.6443e-03, -3.8104e-02, -1.7843e-02, 3.7318e-02, -1.8389e-04,
1.8325e-02, 1.3552e-02, -3.7468e-02, 6.7273e-03, 7.1633e-03,
2.8582e-03, -7.3333e-02, -1.2151e-02, -2.8746e-02, -8.3579e-02,
-4.3914e-02, -1.0770e-01, -5.5328e-02, -3.5092e-02, -1.4532e-01,
-8.5888e-02, -2.3775e-02, -2.8859e-02, 1.3352e-02, -6.4032e-02,
2.0548e-02, 1.0234e-01, -8.7577e-02, -3.8669e-02, -5.2320e-03,
-3.4245e-02, 5.5486e-02, -4.4837e-02, 6.9687e-03, -4.4327e-02,
-6.7565e-03, 2.7793e-02, -3.3797e-02, 5.7987e-02, 1.6319e-01,
-1.3039e-02, -5.1970e-02, -5.2677e-02, 1.0356e-01, -3.8100e-03,
-2.7537e-03, 1.2478e-01, 7.3528e-02, 1.3289e-01, 1.7663e-02,
1.2619e-01, -2.4767e-03, 6.4181e-03, 8.9415e-03, -4.5147e-02,
1.7222e-02, -8.9790e-03, -3.1837e-02, -3.9383e-02, 1.4883e-01,
5.6722e-02, 2.1045e-02, -6.0324e-02, 3.1092e-02, -6.4326e-02,
2.2213e-02, -1.1971e-01, -1.5306e-02, -1.0263e-01, -1.2637e-01,
-4.1058e-02, -5.6575e-02, 4.1797e-03, 1.3452e-01, -1.2819e-02,
2.6866e-03, 1.6029e-01, 2.3245e-01, -4.2328e-02, -6.4599e-02,
1.7108e-02, -2.1896e-02, 1.0525e-01, -5.5853e-02, 3.2976e-02,
1.0699e-01, 1.1300e-01, -6.1979e-02, -5.4210e-02, -5.3009e-02,
-2.9376e-02, -2.1714e-02, 2.2468e-02, 4.5361e-02, -9.8505e-02,
-3.5475e-02, -5.0823e-02, -4.4347e-03, 5.7796e-02, 5.5170e-02,
-8.3426e-03, -8.1350e-02, -5.5494e-02, 6.4789e-02, 3.1395e-02,
-5.0079e-02, 1.3285e-01, -9.2993e-02, -9.9771e-02, 6.8727e-02,
-5.5942e-02, -7.6001e-02, -2.9487e-02, -4.0275e-02, -7.5761e-02,
2.5367e-02, 8.8060e-02, 2.9855e-01, -3.4616e-02, 1.4284e-01,
-4.0483e-02, -8.1688e-02, 1.4179e-01, 1.3529e-01, -4.6716e-02,
-6.3773e-02, 7.7511e-02, 3.6422e-02, -2.0622e-02, 2.0101e-01,
-6.7339e-02, -5.8592e-02, -2.0814e-02, -2.4377e-03, -3.7400e-02,
1.5807e-01, 2.9303e-02, 1.8412e-03, -3.9468e-02, -2.9276e-03,
-2.6781e-02, -9.4567e-03, -8.7347e-04, 2.9797e-02, 8.6355e-02,
-2.1085e-02, 5.2177e-02, 4.9052e-02, 1.7575e-01, 1.5698e-01,
1.4490e-01, 1.5020e-01, -6.6630e-02, -8.4626e-02, -2.9955e-02,
-4.7419e-02, 9.9786e-02, -4.3411e-02, 1.1608e-02, -2.9619e-03,
8.3232e-02, -3.0489e-02, 2.0268e-01, -7.5263e-02, 7.9024e-02,
-4.3677e-02, -1.8006e-02, -3.4929e-02, 5.9259e-03, -4.0002e-02,
1.1322e-01, -5.7925e-02, 1.2260e-02, 1.2257e-03, 1.9178e-01,
-3.7949e-02, -1.6775e-03, -2.6568e-02, 3.1243e-02, 1.2104e-02,
5.9390e-02, -8.7065e-03, 9.3895e-03, -5.7135e-02, -5.0516e-02,
-7.0797e-02, -7.7698e-02, -3.5663e-02, -5.0379e-02, 1.2127e-01,
-4.6770e-02, -7.7563e-02, -2.5216e-02, 9.8359e-02, -2.6719e-03,
8.8916e-02, -2.6189e-02, 1.1815e-01, 3.9994e-02, -8.6231e-02,
3.6091e-01, -3.9968e-02, 7.4097e-05, -1.9611e-02, 5.7974e-03,
6.6413e-02, 1.6438e-02, -6.6922e-02, 2.2760e-02, -7.8957e-02,
-5.4023e-02, -3.0165e-02, -4.3689e-02, 9.6362e-02, -2.1583e-02,
1.2996e-01, 2.9543e-03, 1.1825e-02, 1.7009e-01, -2.3195e-01,
-3.6512e-02, 3.8394e-02, 9.2146e-02, -6.7576e-02, 3.2664e-03,
-1.0576e-01, -6.7660e-02, 1.1281e-02, 3.9100e-02, 2.0429e-01,
-6.8519e-02, -1.1334e-01, 4.3794e-02, 1.4112e-02, -5.0856e-02,
-6.1464e-02], device='cuda:0')),
('fc_layers.2.weight',
tensor([[ 0.2420, -0.0078, -0.0777, ..., 0.0493, 0.0063, -0.0579],
[ 0.0752, -0.0075, -0.0375, ..., -0.0054, -0.0300, -0.0670],
[-0.1712, -0.0738, -0.1134, ..., 0.0200, -0.0076, -0.0253],
...,
[-0.0167, -0.0825, -0.0241, ..., 0.0542, -0.0664, -0.0272],
[ 0.0094, -0.1436, -0.1962, ..., -0.0056, -0.1068, -0.0652],
[-0.1021, -0.0221, -0.0064, ..., -0.0483, 0.0342, -0.0601]],
device='cuda:0')),
('fc_layers.2.bias',
tensor([-0.0892, -0.0072, 0.1831, 0.1049, 0.1121, -0.1079, -0.1139, -0.2029,
0.0307, -0.1510], device='cuda:0'))])},
{'ratio': 0.75,
'bias': 32,
'train_losses': [279.6168843320527,
203.36498576267437,
164.97499779132025,
152.52692190213145,
144.19123609090974,
139.5601146514503,
135.29778215793624,
131.80951470667156,
129.88497053765502,
128.31610959685077,
125.94128367663679,
125.29868494486517,
123.76244435093997,
122.77083298465136,
121.533043219573],
'test_losses': [247.83773630740595,
175.77687873092353,
157.2982120326921,
145.03994482638788,
140.19019647205576,
139.42091278001374,
133.3723391972336,
130.08592545752433,
125.52949979726006,
123.543093400843,
124.17986240106471,
120.62166309824177,
122.99814709962583,
121.84751430212283,
116.59204730333066],
'model_state_dict': OrderedDict([('conv_layers.0.weight',
tensor([[[[-1.8924e-02, -1.0234e-01, 1.2984e-02],
[-8.5669e-02, -6.8548e-02, 1.3016e-01],
[ 9.0769e-02, 3.4107e-02, -4.0970e-02]],
[[-1.8763e-01, -1.0332e-01, -1.3092e-03],
[-8.3894e-02, -1.5585e-02, -1.5577e-01],
[ 3.8009e-02, 4.4879e-02, 8.6099e-03]],
[[ 7.1973e-02, 7.5138e-02, -3.0354e-02],
[ 3.0677e-02, -1.7543e-01, 6.4281e-02],
[ 2.0128e-02, 8.7496e-02, 8.7893e-02]]],
[[[-3.0646e-02, 6.2107e-02, 2.2932e-01],
[-1.0512e-01, -2.1269e-01, -7.9659e-02],
[ 1.8585e-02, -3.4185e-02, 1.5825e-01]],
[[-1.7138e-01, 1.7267e-01, 1.8876e-01],
[-1.7871e-01, -1.3567e-01, -1.6956e-02],
[-1.4883e-01, -2.0031e-01, 1.6015e-01]],
[[ 9.1825e-02, 1.5753e-01, 1.1165e-01],
[-1.9225e-01, 1.2309e-01, 1.2246e-01],
[-1.5552e-01, 1.1328e-01, -1.7355e-01]]],
[[[-1.9168e-01, -1.0574e-01, -2.1473e-02],
[-2.1787e-01, -4.5191e-02, 1.5883e-01],
[-4.0711e-02, 1.0388e-01, 3.3297e-01]],
[[-2.2526e-01, 1.4950e-02, 7.3946e-02],
[-2.7242e-01, 2.4354e-02, -3.1352e-02],
[-4.0864e-02, 1.0111e-01, 3.1594e-01]],
[[-1.7850e-01, -3.4405e-03, 1.5147e-01],
[-4.5263e-03, -1.2319e-01, -2.7732e-04],
[-4.4636e-02, 1.7235e-01, 7.8898e-02]]],
[[[-5.9764e-02, 1.5309e-01, 7.9743e-02],
[-4.2807e-02, -1.1749e-01, 1.2566e-01],
[-1.8138e-01, -5.2859e-02, 2.0680e-01]],
[[-1.5705e-01, 8.4935e-02, 4.5204e-02],
[ 7.6558e-02, 9.3237e-02, 9.6658e-02],
[-1.3596e-01, 5.4510e-02, 1.5251e-01]],
[[-3.4024e-01, -2.0921e-01, 8.8966e-02],
[-6.1604e-02, -2.8724e-02, 1.9885e-02],
[-6.1281e-02, 1.2322e-01, 1.0362e-01]]],
[[[ 7.6542e-02, -2.2611e-01, -3.0612e-01],
[ 1.2004e-01, -3.8992e-02, -4.5639e-02],
[ 1.4497e-01, 1.5697e-01, 9.0411e-02]],
[[ 1.2381e-01, -8.9057e-02, -2.4307e-01],
[-6.4210e-02, 3.9792e-02, -5.5743e-02],
[ 2.6906e-01, -1.8900e-02, -5.4682e-02]],
[[-1.0187e-01, -1.2725e-01, -1.5454e-01],
[ 5.5238e-02, -9.4189e-03, 1.6114e-01],
[ 7.3129e-02, 2.2701e-01, -2.3811e-02]]],
[[[ 5.4630e-02, 1.9840e-01, 7.6797e-02],
[ 1.8902e-01, -1.5756e-02, -2.1648e-02],
[-2.1248e-01, -2.7099e-01, -5.2918e-03]],
[[ 1.9695e-01, 8.0697e-02, 2.3775e-01],
[ 3.4526e-02, 1.2184e-01, -2.0169e-01],
[-1.9003e-01, -2.9903e-01, -3.9972e-02]],
[[ 2.7072e-01, -8.0551e-02, 7.5417e-02],
[ 6.8338e-02, -8.0669e-02, 1.3736e-01],
[-1.4526e-01, -8.9952e-02, -1.2442e-01]]],
[[[ 1.7320e-01, 4.2827e-02, 1.7875e-01],
[ 1.1251e-01, -9.7753e-02, -2.0478e-02],
[-1.3181e-01, -2.0817e-01, 1.6381e-01]],
[[-6.9557e-02, -1.5204e-01, 1.1115e-01],
[-9.6135e-02, -7.9967e-02, -1.8102e-01],
[ 9.8063e-02, -1.8933e-01, -1.2449e-01]],
[[ 2.1046e-01, -3.3440e-02, 1.6911e-02],
[-3.1429e-02, -9.5980e-02, 1.3249e-01],
[ 2.0953e-01, 1.0522e-01, -1.2791e-01]]],
[[[ 6.4710e-02, 6.1884e-02, -3.3035e-01],
[ 3.6775e-02, 1.4978e-01, 3.6623e-02],
[ 2.7343e-01, 1.9924e-01, -2.6016e-01]],
[[-4.6672e-02, 1.2160e-01, -1.4304e-02],
[-2.2824e-02, 9.1912e-02, -1.7900e-01],
[ 1.4910e-01, 6.2459e-02, -1.4039e-01]],
[[-1.3931e-01, -7.7546e-02, 1.7571e-01],
[ 3.7545e-02, -1.8415e-01, 2.1211e-01],
[-7.9043e-02, -2.3128e-01, 2.2032e-02]]]], device='cuda:0')),
('conv_layers.0.bias',
tensor([-0.1162, 0.0753, 0.2446, -0.1552, 0.2461, 0.2532, -0.2775, -0.1984],
device='cuda:0')),
('conv_layers.2.weight',
tensor([[[[ 0.0246, 0.1424, 0.1519],
[ 0.1114, -0.1287, -0.0663],
[ 0.0334, -0.2631, -0.2856]],
[[ 0.1090, -0.1077, -0.2150],
[-0.0187, 0.0406, -0.1774],
[ 0.0202, 0.1471, -0.1591]],
[[ 0.1829, 0.1335, -0.0736],
[ 0.0541, -0.0339, -0.1649],
[ 0.1139, -0.1330, -0.1619]],
...,
[[-0.1681, -0.2535, -0.2913],
[ 0.1608, 0.1618, 0.1015],
[ 0.1718, 0.1787, 0.0015]],
[[ 0.0988, -0.1958, -0.1752],
[ 0.1387, -0.1692, -0.2483],
[ 0.3769, 0.0992, 0.0359]],
[[-0.1730, 0.0775, 0.0782],
[-0.0933, 0.0966, 0.0966],
[ 0.1227, 0.1660, -0.0274]]],
[[[ 0.0455, -0.0778, -0.0114],
[-0.0270, -0.0249, 0.0067],
[ 0.1285, 0.0942, -0.0570]],
[[-0.0182, 0.0067, 0.1721],
[ 0.0946, 0.1316, 0.2788],
[-0.1215, -0.0448, 0.0826]],
[[-0.2926, -0.1925, 0.0668],
[-0.0733, -0.0071, 0.0867],
[ 0.0132, 0.0712, 0.1417]],
...,
[[ 0.0324, 0.1102, -0.0551],
[ 0.1107, 0.0044, -0.0118],
[-0.1975, -0.2240, -0.1260]],
[[ 0.0880, -0.1046, -0.0448],
[-0.0792, 0.0837, 0.1759],
[ 0.0251, -0.0025, 0.0290]],
[[-0.3402, -0.1758, -0.2596],
[-0.2304, -0.3248, -0.3476],
[-0.2311, -0.2556, -0.3198]]],
[[[-0.2731, -0.2678, -0.2238],
[-0.1170, -0.3362, -0.0615],
[-0.0882, -0.0775, 0.0232]],
[[-0.1436, -0.0055, -0.1052],
[ 0.1451, -0.0706, -0.0238],
[-0.0985, -0.0972, 0.0012]],
[[ 0.0334, -0.0041, -0.0640],
[ 0.0637, -0.0190, -0.0732],
[-0.0172, 0.0579, -0.0005]],
...,
[[-0.1135, -0.1294, -0.0668],
[ 0.0444, 0.0464, 0.0441],
[-0.0639, -0.1479, -0.0175]],
[[-0.2073, -0.0580, -0.1688],
[ 0.0895, 0.1552, 0.1830],
[ 0.0733, 0.0817, -0.0041]],
[[-0.2156, -0.1903, 0.1847],
[-0.0714, -0.2683, 0.3674],
[-0.1782, -0.0282, 0.3109]]],
...,
[[[-0.0306, -0.0538, 0.0584],
[-0.1238, 0.0675, 0.0061],
[ 0.0930, 0.0105, 0.1044]],
[[-0.0115, -0.0334, -0.1808],
[ 0.0510, -0.2031, -0.1627],
[-0.0470, -0.1093, 0.0224]],
[[ 0.0694, -0.4205, -0.4187],
[-0.1024, -0.4486, 0.0870],
[-0.2254, -0.1501, 0.0195]],
...,
[[ 0.0692, 0.2131, 0.1597],
[ 0.1342, 0.2155, 0.0010],
[ 0.0227, 0.0162, 0.1158]],
[[-0.1424, -0.0282, 0.1473],
[-0.0444, -0.1646, 0.0592],
[-0.0221, -0.0333, 0.0124]],
[[ 0.0369, -0.1359, -0.1256],
[-0.0904, 0.0794, -0.0449],
[-0.0051, -0.0046, -0.0145]]],
[[[ 0.1928, 0.0787, 0.0739],
[-0.0951, 0.0394, -0.0159],
[ 0.0959, -0.0923, -0.1570]],
[[ 0.0786, -0.0160, -0.0304],
[ 0.1115, -0.0620, -0.0113],
[ 0.0385, -0.0918, -0.0610]],
[[ 0.0746, 0.1780, 0.0334],
[ 0.1894, 0.1382, -0.0404],
[ 0.1379, -0.0154, -0.0090]],
...,
[[-0.1006, -0.1493, -0.0315],
[-0.1314, -0.2951, -0.0258],
[-0.2790, -0.0037, 0.0426]],
[[-0.0042, -0.0345, -0.0560],
[ 0.0436, -0.1041, -0.1650],
[-0.1006, -0.2375, -0.0352]],
[[-0.1143, 0.0185, -0.0202],
[ 0.0142, 0.0146, 0.0108],
[-0.0172, -0.0269, -0.1163]]],
[[[-0.0733, -0.0296, -0.0510],
[ 0.0119, -0.0727, 0.0160],
[ 0.0037, 0.0084, 0.0599]],
[[-0.0275, 0.0435, -0.0288],
[ 0.0985, -0.0050, -0.0328],
[-0.0279, 0.0414, -0.0379]],
[[-0.0666, -0.3059, -0.1726],
[-0.1530, -0.2210, -0.1057],
[-0.0305, -0.0013, 0.2091]],
...,
[[ 0.2018, 0.2344, 0.0943],
[-0.0567, 0.0976, -0.0144],
[-0.0529, 0.0246, 0.0751]],
[[-0.0979, -0.1030, -0.0981],
[-0.1264, 0.0430, -0.0109],
[-0.0651, -0.1360, 0.0479]],
[[ 0.1978, 0.0310, -0.1950],
[ 0.1520, 0.0484, -0.1995],
[-0.1548, -0.1922, -0.2371]]]], device='cuda:0')),
('conv_layers.2.bias',
tensor([-0.3467, 0.0162, 0.1326, 0.1413, 0.0709, 0.1225, 0.2040, -0.0279,
0.0219, -0.3150, 0.2120, 0.2915, -0.2583, -0.0279, 0.0263, 0.1224],
device='cuda:0')),
('conv_layers.5.weight',
tensor([[[[-0.0711, 0.1315, 0.0772],
[ 0.0690, -0.0086, 0.0362],
[ 0.1949, -0.2787, 0.0133]],
[[ 0.0926, 0.0074, 0.0586],
[-0.0183, 0.0038, -0.0056],
[ 0.0484, 0.0891, 0.0470]],
[[ 0.0092, -0.0524, 0.0775],
[ 0.0960, -0.2198, 0.0367],
[ 0.1217, -0.0708, -0.0126]],
...,
[[-0.2372, -0.1618, 0.1659],
[-0.3870, -0.2731, -0.0828],
[-0.1076, -0.3142, -0.2251]],
[[-0.2746, -0.2065, -0.0829],
[-0.2064, -0.2634, -0.1900],
[-0.0823, -0.0050, -0.0566]],
[[-0.0583, 0.0872, 0.1333],
[-0.1927, -0.0968, -0.0053],
[-0.1210, -0.1492, -0.0486]]],
[[[-0.0217, 0.0252, -0.0250],
[-0.0430, -0.0363, 0.0468],
[-0.0604, 0.0145, -0.0456]],
[[-0.0245, -0.0412, 0.0638],
[-0.0479, -0.0853, -0.0606],
[ 0.0147, -0.0546, 0.0222]],
[[-0.0598, 0.0338, -0.0982],
[ 0.0242, -0.0707, -0.1038],
[-0.0058, -0.0154, -0.1067]],
...,
[[-0.0307, -0.1144, 0.0478],
[-0.0033, -0.0468, -0.0950],
[-0.0044, -0.1076, -0.0469]],
[[-0.0927, -0.0085, -0.0442],
[ 0.0231, 0.0188, -0.0513],
[-0.0598, 0.0453, -0.0286]],
[[-0.0653, -0.0582, 0.0455],
[ 0.0298, -0.0749, -0.0126],
[-0.0519, -0.0810, -0.0708]]],
[[[-0.0888, -0.2328, -0.1217],
[-0.1140, -0.0843, -0.1953],
[-0.0159, -0.1460, -0.0509]],
[[ 0.1016, -0.0119, 0.0713],
[-0.0211, -0.1535, -0.0148],
[-0.0464, -0.1864, 0.1338]],
[[ 0.0714, 0.1013, 0.0970],
[ 0.0655, -0.0040, 0.0460],
[ 0.0802, 0.0992, 0.0597]],
...,
[[-0.0087, -0.0012, -0.0842],
[-0.2047, -0.2206, -0.0460],
[-0.1126, 0.0748, 0.0152]],
[[ 0.0557, 0.0059, -0.0266],
[-0.0622, -0.0833, -0.0829],
[-0.0667, -0.1298, -0.0727]],
[[-0.0329, 0.1426, 0.1702],
[-0.1336, -0.1230, 0.0728],
[-0.1789, -0.0028, 0.0649]]],
...,
[[[ 0.1889, 0.0087, -0.1169],
[ 0.1364, -0.1137, 0.1058],
[ 0.1461, 0.1403, -0.0967]],
[[-0.0159, -0.2540, 0.0502],
[-0.0408, -0.0616, -0.0494],
[-0.0157, -0.0624, 0.1047]],
[[-0.1001, 0.0228, 0.0109],
[-0.0167, -0.0544, 0.0522],
[-0.0385, -0.0065, 0.0746]],
...,
[[ 0.0128, 0.0896, 0.0490],
[ 0.0300, -0.0678, 0.0208],
[-0.1436, -0.2481, -0.0605]],
[[-0.0691, 0.0849, 0.1095],
[ 0.0187, 0.0671, 0.1434],
[ 0.0666, 0.0063, 0.0487]],
[[ 0.0492, 0.1052, 0.0835],
[ 0.1710, 0.1500, 0.0409],
[ 0.0107, -0.0674, -0.0671]]],
[[[-0.1534, -0.0490, -0.0720],
[-0.1682, 0.1110, -0.0244],
[ 0.0214, 0.0166, -0.1740]],
[[ 0.0210, 0.0560, -0.1208],
[-0.0250, -0.0124, 0.0155],
[-0.0857, 0.0796, 0.1362]],
[[-0.0609, 0.1379, 0.2277],
[ 0.0738, 0.0095, 0.0340],
[-0.0641, -0.2259, -0.1302]],
...,
[[ 0.0656, -0.0582, 0.0750],
[-0.0396, -0.0790, 0.1242],
[-0.0011, 0.0305, 0.0614]],
[[-0.0140, 0.2479, 0.0657],
[ 0.1011, 0.0309, -0.0330],
[ 0.0776, -0.1501, -0.2242]],
[[-0.0883, -0.0488, 0.0254],
[ 0.0493, 0.2020, 0.2046],
[-0.0166, 0.1770, 0.1215]]],
[[[ 0.0064, -0.1699, -0.0699],
[-0.0009, -0.1376, -0.0932],
[ 0.0551, -0.1342, -0.1698]],
[[ 0.0393, -0.0253, -0.0381],
[-0.0011, 0.0129, -0.2275],
[ 0.1007, 0.0423, -0.0993]],
[[ 0.0825, 0.1057, 0.0891],
[ 0.0796, 0.1503, 0.1273],
[-0.1148, 0.0306, 0.1058]],
...,
[[ 0.0073, -0.0283, -0.0272],
[ 0.0700, -0.1770, 0.1788],
[ 0.1302, 0.0153, 0.2581]],
[[-0.0760, -0.1333, 0.0415],
[ 0.0943, 0.0460, 0.0787],
[-0.0364, -0.0648, -0.0183]],
[[ 0.0381, 0.0125, -0.0761],
[ 0.0887, -0.1149, -0.1007],
[ 0.1409, -0.0892, 0.1404]]]], device='cuda:0')),
('conv_layers.5.bias',
tensor([ 0.0523, -0.0369, 0.2201, -0.2073, 0.1735, 0.0510, 0.0710, -0.0595,
-0.0466, 0.1003, -0.0892, 0.1949, -0.0800, -0.0797, 0.1678, 0.1581,
0.1733, -0.2735, -0.1060, 0.0965, 0.0394, 0.1749, 0.2901, 0.0343,
-0.1250, -0.0914, -0.1132, 0.1387, -0.1047, 0.1075, -0.0019, 0.1405],
device='cuda:0')),
('conv_layers.7.weight',
tensor([[[[ 0.2001, 0.1611, 0.0129],
[-0.0335, -0.0271, -0.2200],
[-0.1433, -0.2007, -0.1524]],
[[-0.0097, 0.0042, 0.0256],
[-0.0414, -0.0025, 0.0246],
[-0.0070, -0.0415, -0.0152]],
[[-0.0654, -0.0526, -0.0289],
[-0.1316, -0.0288, -0.0182],
[-0.1031, -0.0200, -0.0575]],
...,
[[ 0.0280, 0.0740, 0.1266],
[-0.0859, -0.0044, -0.0535],
[-0.1482, -0.1781, -0.1166]],
[[-0.3057, -0.2266, 0.0252],
[-0.0245, -0.0417, 0.1536],
[-0.1414, -0.1920, 0.0795]],
[[-0.1049, 0.1194, 0.1344],
[-0.1227, 0.0993, -0.0056],
[-0.1527, -0.0192, -0.0832]]],
[[[ 0.0660, -0.2349, -0.2232],
[ 0.0445, -0.0632, -0.1148],
[ 0.0155, -0.2719, -0.0693]],
[[ 0.0292, -0.0356, 0.0174],
[-0.0428, -0.0067, -0.0082],
[-0.0703, 0.0087, -0.0039]],
[[-0.0786, -0.0730, -0.0197],
[ 0.0409, 0.0071, -0.1327],
[ 0.1904, 0.0465, -0.1000]],
...,
[[ 0.0657, -0.0059, 0.0251],
[ 0.0123, -0.1818, -0.0023],
[-0.0272, -0.2247, 0.0955]],
[[-0.1471, -0.0577, -0.1635],
[-0.0953, 0.0224, -0.1132],
[ 0.0715, 0.0126, -0.1692]],
[[-0.0563, 0.1551, 0.0493],
[ 0.1738, -0.2444, 0.1188],
[-0.0288, -0.3718, 0.2823]]],
[[[ 0.0022, -0.0382, -0.0825],
[-0.0373, 0.0422, -0.0225],
[ 0.0134, 0.0093, 0.0250]],
[[ 0.0217, -0.0326, -0.0260],
[ 0.0164, -0.0372, -0.0651],
[ 0.0531, -0.0181, -0.1009]],
[[-0.1252, -0.0321, 0.0774],
[-0.0486, 0.0074, -0.0533],
[ 0.0717, -0.0523, 0.0517]],
...,
[[-0.0236, -0.0217, -0.0682],
[-0.1155, -0.0141, -0.0874],
[ 0.0264, -0.0135, -0.1427]],
[[-0.1108, -0.0989, -0.1335],
[ 0.1284, -0.0372, -0.0241],
[ 0.0158, -0.0777, -0.0012]],
[[-0.0949, -0.0349, -0.1598],
[-0.1009, 0.0735, -0.1245],
[ 0.0095, 0.0817, -0.0268]]],
...,
[[[ 0.1581, 0.1111, 0.1025],
[ 0.1413, 0.0909, 0.0353],
[ 0.1609, 0.0252, 0.0275]],
[[-0.1024, 0.0289, 0.0563],
[-0.0163, -0.0243, -0.0217],
[ 0.0635, -0.0029, 0.0250]],
[[-0.1080, 0.0680, 0.0220],
[-0.0950, -0.0945, -0.1429],
[ 0.0286, 0.0079, -0.0336]],
...,
[[ 0.0677, 0.0677, -0.0354],
[ 0.0577, 0.0687, 0.0402],
[ 0.0751, 0.0935, 0.0887]],
[[ 0.0691, 0.1729, 0.1815],
[ 0.0769, 0.0051, 0.0244],
[-0.2173, -0.1623, -0.2709]],
[[ 0.2121, 0.1388, -0.1120],
[ 0.1168, -0.0625, -0.2001],
[-0.1212, -0.1233, -0.1693]]],
[[[-0.0389, -0.0581, -0.0147],
[-0.0570, -0.0873, -0.0289],
[ 0.0487, 0.0317, -0.0066]],
[[ 0.0556, -0.0435, -0.0231],
[-0.0097, 0.0023, -0.0077],
[ 0.0333, -0.0395, 0.0420]],
[[-0.0691, -0.0549, 0.0009],
[-0.0390, 0.0159, 0.0287],
[-0.0688, -0.0157, -0.0114]],
...,
[[ 0.0110, -0.0541, -0.0835],
[-0.1032, -0.0991, -0.0652],
[-0.1372, -0.0768, 0.0388]],
[[ 0.0044, 0.0402, 0.0250],
[-0.0665, -0.0389, -0.0966],
[ 0.0670, -0.0269, -0.0901]],
[[-0.0317, -0.0924, -0.0835],
[-0.0371, -0.0621, -0.0571],
[ 0.0419, -0.0136, -0.0048]]],
[[[ 0.0549, -0.1230, -0.2763],
[ 0.1341, 0.0635, 0.0336],
[ 0.1700, 0.1907, -0.1219]],
[[ 0.0040, 0.0537, -0.0269],
[-0.0121, 0.0372, 0.0160],
[ 0.0195, 0.0640, 0.0284]],
[[-0.0855, -0.1823, 0.0226],
[ 0.1122, -0.0957, -0.1148],
[ 0.1138, 0.1682, 0.0735]],
...,
[[-0.0453, 0.0282, -0.0694],
[-0.1698, 0.0026, 0.0088],
[-0.3632, -0.0807, 0.1443]],
[[-0.1075, 0.0150, 0.0209],
[-0.0763, 0.1657, 0.1336],
[-0.3297, 0.0165, 0.0971]],
[[ 0.1786, 0.1859, -0.0310],
[-0.2875, 0.0106, -0.1045],
[-0.4498, -0.3054, -0.1084]]]], device='cuda:0')),
('conv_layers.7.bias',
tensor([ 0.1130, 0.1152, 0.0315, -0.1288, 0.0644, 0.0508, 0.0400, 0.0723,
-0.1249, -0.0800, -0.0459, 0.0949, 0.1108, 0.0988, -0.0662, -0.0987,
-0.0170, 0.0601, 0.1318, 0.3095, 0.0074, 0.0682, -0.0171, 0.0375,
0.0979, -0.0702, 0.0157, -0.1242, -0.1068, 0.1619, -0.1319, 0.0143],
device='cuda:0')),
('conv_layers.10.weight',
tensor([[[[ 5.5958e-02, -6.7549e-02, 1.3011e-01],
[ 3.1869e-02, 7.1756e-02, 4.3621e-02],
[-1.5157e-02, 1.0986e-01, 2.4258e-02]],
[[ 5.6681e-03, 7.2452e-03, 8.9536e-02],
[-1.1133e-01, -8.7962e-02, 1.1118e-01],
[-6.1623e-02, 9.4808e-03, 9.7563e-02]],
[[-8.0647e-02, 2.3402e-02, -2.1794e-02],
[ 3.5812e-02, 1.5571e-01, 7.5542e-02],
[-5.9498e-02, 1.7165e-01, 4.6333e-02]],
...,
[[ 2.1560e-02, 3.9646e-02, 2.5931e-02],
[-5.8635e-02, -1.2448e-03, -9.2409e-02],
[-1.3364e-01, -1.4649e-01, -1.1705e-01]],
[[-2.4937e-02, -1.0318e-01, -1.5741e-01],
[-4.8995e-02, -3.3736e-02, 3.0916e-02],
[-8.2383e-02, 8.7884e-02, 1.0640e-01]],
[[ 1.2759e-02, -4.5613e-03, -1.3037e-02],
[ 9.0928e-02, -3.8996e-02, -8.5045e-02],
[ 1.2036e-01, -1.8109e-01, -2.5315e-03]]],
[[[ 8.2924e-02, 8.0235e-04, -1.6311e-01],
[-1.4288e-02, 1.4700e-01, 2.0330e-01],
[-1.7924e-04, 1.4535e-01, 2.6015e-02]],
[[-1.6707e-01, -2.7713e-02, -4.2694e-02],
[ 1.2413e-01, 2.1223e-01, 7.2487e-02],
[ 1.7620e-01, 2.1071e-01, -1.6861e-01]],
[[-9.6547e-02, -2.9143e-01, -3.7331e-02],
[-3.1891e-03, -2.0522e-01, 1.5329e-01],
[-7.1845e-02, -2.2165e-01, -9.6214e-02]],
...,
[[ 3.5885e-02, -1.8335e-02, -8.6357e-02],
[-2.8673e-02, -3.0723e-01, -1.5975e-01],
[ 1.1586e-02, -9.7061e-02, 1.3607e-01]],
[[-4.7306e-02, 5.3077e-02, -5.3397e-03],
[ 5.6006e-02, -5.5976e-02, -1.9414e-02],
[ 3.7148e-02, 2.3441e-02, -5.9149e-02]],
[[ 2.0346e-02, -2.0273e-02, 5.0640e-02],
[ 1.7510e-01, -1.2105e-01, 2.7413e-02],
[ 7.1200e-02, -3.4669e-02, 9.9515e-02]]],
[[[-1.1813e-01, -4.3284e-02, 1.1073e-01],
[-2.4984e-01, 3.1868e-02, -2.7299e-02],
[ 2.5976e-02, 5.3411e-02, -5.3951e-02]],
[[ 1.1381e-01, 1.8574e-01, 2.4735e-01],
[-9.6871e-02, 8.1603e-02, 9.8340e-02],
[-2.2372e-01, -1.4453e-01, -1.9953e-01]],
[[-6.0054e-02, -1.0982e-01, 6.6172e-02],
[ 3.0698e-02, 2.4819e-03, -5.3467e-02],
[-7.1686e-02, -7.8474e-02, -1.2287e-01]],
...,
[[-1.1754e-02, -1.2776e-01, -1.8665e-01],
[ 1.8800e-01, 8.5006e-02, -2.3573e-01],
[ 2.1475e-01, 2.3866e-01, -5.2620e-02]],
[[-1.3040e-02, 2.4500e-02, -1.0591e-01],
[ 3.4803e-02, 8.2127e-02, 4.8160e-02],
[-6.1301e-02, -2.6725e-02, 9.0244e-04]],
[[ 1.3781e-01, -4.7976e-02, 1.6304e-02],
[ 1.5854e-01, -5.0540e-02, -2.7939e-01],
[ 1.4749e-01, -3.2710e-01, -4.4184e-01]]],
...,
[[[-1.5312e-01, -5.6534e-02, -7.8072e-02],
[-5.1828e-02, -1.4658e-02, 8.6517e-02],
[-1.4458e-01, 3.5540e-01, 1.9153e-01]],
[[ 2.2136e-01, 9.2169e-02, -1.1710e-01],
[ 7.5876e-02, -4.4069e-02, 1.6972e-02],
[-8.2731e-02, 7.6705e-02, 2.6090e-02]],
[[ 8.5050e-02, -7.0590e-02, -8.9639e-02],
[ 6.3257e-02, -1.1094e-01, -6.6999e-02],
[-2.1932e-02, -1.6830e-01, -2.6389e-01]],
...,
[[-4.7947e-02, 1.6192e-01, 1.4710e-01],
[ 1.5371e-01, -4.9146e-02, 1.3022e-01],
[ 7.8652e-02, 2.5672e-02, 2.0013e-01]],
[[ 1.4728e-01, 1.3835e-02, -3.0275e-02],
[ 3.9367e-02, 1.0774e-01, 8.0746e-02],
[ 5.8084e-02, 1.5345e-02, 1.2945e-01]],
[[ 1.9668e-01, 1.2630e-02, 1.8651e-02],
[ 1.6705e-01, -3.3455e-02, 4.3083e-02],
[ 1.6102e-02, 5.5474e-02, 9.7189e-02]]],
[[[ 2.4338e-02, -2.7769e-02, 1.4842e-01],
[ 3.7368e-03, 1.2347e-01, 1.4978e-01],
[ 1.7625e-01, 1.8100e-01, -5.4143e-02]],
[[-1.7692e-01, -2.0018e-02, 9.0229e-02],
[-7.0086e-02, 1.0260e-01, 5.9354e-02],
[-2.7649e-02, 2.0677e-01, 8.7401e-02]],
[[-7.7725e-02, -8.0529e-02, 8.1935e-02],
[ 1.2645e-01, 7.3619e-02, 9.7543e-02],
[ 1.3179e-01, 7.8238e-02, 8.6675e-03]],
...,
[[ 7.9849e-03, -2.6402e-02, -1.4254e-01],
[-1.7577e-01, -1.7378e-01, 9.8662e-04],
[-4.9918e-02, -2.7512e-01, 1.1932e-01]],
[[ 1.2746e-02, -5.9782e-02, -3.9240e-02],
[ 1.7437e-02, -6.0933e-02, -7.1757e-03],
[-1.0974e-01, -5.0014e-02, 1.6041e-02]],
[[ 3.2547e-01, 6.8285e-02, 6.3309e-03],
[ 1.9893e-01, -9.5113e-02, -3.5472e-02],
[ 3.8229e-02, -5.9324e-02, 8.9656e-03]]],
[[[-2.1515e-02, -9.7407e-03, -6.5288e-02],
[-3.7627e-02, -5.9298e-03, 6.0308e-03],
[ 8.6513e-02, -5.5450e-03, 5.1465e-02]],
[[-1.1005e-01, -4.0290e-03, -1.5989e-02],
[-2.4681e-02, 1.1117e-02, -8.3498e-02],
[ 7.2573e-02, -1.5542e-02, -5.0294e-02]],
[[ 3.2163e-02, 1.6605e-02, 2.1848e-02],
[-4.1785e-02, -7.5831e-02, -2.5275e-05],
[-1.2588e-02, -6.2213e-02, -5.2197e-02]],
...,
[[ 6.7993e-02, 2.2777e-02, 1.3932e-02],
[-3.8710e-02, -4.4840e-02, -2.0280e-02],
[-1.0731e-01, -8.1583e-02, -4.9713e-02]],
[[ 6.6445e-03, -4.2070e-02, -3.4124e-02],
[ 5.7871e-02, -2.2371e-02, -4.4999e-02],
[-5.3202e-03, 1.4200e-04, -1.0394e-03]],
[[ 3.4549e-02, -4.5027e-02, 1.4511e-03],
[ 1.4129e-02, -8.4619e-02, -1.5232e-02],
[-7.0600e-03, -6.8676e-02, 1.3396e-02]]]], device='cuda:0')),
('conv_layers.10.bias',
tensor([ 0.0938, -0.0186, 0.0673, 0.1424, 0.1567, -0.0595, 0.1011, -0.0042,
-0.0206, 0.0698, 0.1712, -0.0986, 0.2093, -0.0358, 0.0260, -0.1218,
0.2124, -0.0241, 0.0100, -0.0765, 0.0821, 0.1533, 0.0550, 0.0979,
0.1328, 0.0726, 0.1133, -0.1365, -0.0176, 0.0129, 0.0936, -0.1106],
device='cuda:0')),
('conv_layers.12.weight',
tensor([[[[ 2.8270e-02, 6.7389e-02, 1.9074e-01],
[-2.5164e-02, -1.8141e-02, 9.3509e-02],
[ 3.3765e-03, -1.0162e-01, -2.5649e-02]],
[[ 2.6196e-02, -3.0901e-02, 1.4578e-02],
[-4.4347e-02, 4.6544e-02, 7.5095e-02],
[-1.2187e-01, -6.1693e-02, 4.8179e-02]],
[[ 1.0838e-02, -1.4909e-02, -2.8724e-02],
[-1.4645e-01, -1.4782e-01, 1.5565e-02],
[-9.7903e-02, 1.5346e-01, 2.2026e-01]],
...,
[[-1.8798e-01, -2.2820e-01, -1.3940e-01],
[ 1.5380e-02, -1.2667e-01, -3.1829e-02],
[ 3.5897e-02, 4.1437e-02, -2.4189e-02]],
[[ 2.3440e-01, 9.2598e-02, -1.9740e-02],
[ 1.3069e-01, 1.1339e-02, -9.3378e-02],
[ 2.4577e-01, -1.4725e-01, -1.3302e-01]],
[[ 4.1789e-02, 1.1831e-01, 1.4511e-02],
[ 1.8912e-02, -4.2863e-02, 8.6712e-02],
[ 5.6411e-02, -3.8415e-02, -1.7197e-02]]],
[[[-4.7449e-02, 1.9930e-02, -2.8154e-02],
[-9.9053e-02, -7.8769e-02, -4.0066e-02],
[-5.8217e-02, -4.0862e-02, -1.0211e-01]],
[[-6.2171e-02, -3.0592e-02, -2.5818e-02],
[-8.5964e-02, -6.2660e-02, -2.6602e-02],
[-1.3669e-02, -2.0996e-02, 3.2590e-03]],
[[-7.2246e-02, 3.3144e-02, -1.9277e-03],
[-3.7280e-02, -2.8847e-02, -7.9831e-02],
[-4.1603e-04, 3.3208e-02, -8.2204e-02]],
...,
[[ 1.4969e-02, -4.0273e-02, 8.2213e-03],
[-3.6660e-02, 3.8408e-02, -6.2844e-02],
[-7.4657e-02, -1.0883e-01, -5.2103e-02]],
[[-6.7354e-03, -1.9265e-02, -5.8257e-02],
[ 2.4807e-02, -3.5853e-02, -1.3069e-01],
[-1.3521e-02, 7.3799e-03, 1.7661e-02]],
[[ 5.6477e-02, -5.6434e-02, 5.6030e-02],
[ 2.6064e-02, 2.0491e-02, -8.3650e-04],
[-1.2100e-02, 8.2095e-03, -3.4005e-03]]],
[[[-4.9736e-02, 7.6146e-02, -8.4301e-02],
[-5.6683e-02, 6.9797e-02, 5.1772e-02],
[-6.9100e-02, 7.0247e-02, 1.1405e-01]],
[[ 9.3135e-03, -6.9978e-03, -5.1646e-03],
[ 9.8982e-02, 2.3798e-02, -2.5621e-02],
[ 1.4338e-01, 1.3492e-01, 4.0257e-02]],
[[ 1.2549e-01, 4.1868e-02, 3.5600e-01],
[-1.2872e-02, -1.8791e-01, -3.7157e-02],
[-8.0403e-02, -3.4126e-01, -1.3987e-01]],
...,
[[-7.1260e-02, 5.6742e-02, -8.1344e-02],
[-9.1587e-02, -1.5919e-01, -6.4815e-02],
[-1.4132e-01, 7.5443e-02, 9.3530e-02]],
[[ 1.8701e-01, 1.8295e-01, 2.2250e-01],
[ 1.5356e-01, 1.2104e-01, -2.0846e-02],
[ 1.2512e-01, 1.3776e-01, -2.4146e-01]],
[[-2.9048e-03, 3.5234e-03, -5.0334e-02],
[-6.2574e-03, -3.1852e-02, 3.7211e-02],
[ 6.8676e-02, 1.5386e-02, 4.7132e-02]]],
...,
[[[-2.1945e-02, 1.0264e-01, 1.1659e-02],
[ 6.6903e-02, 5.1489e-02, -5.3984e-02],
[-3.3835e-03, 4.0481e-02, -3.6617e-02]],
[[-1.1794e-02, -3.3418e-02, -1.5726e-01],
[ 5.0398e-02, 4.7861e-02, -2.7108e-01],
[ 6.4967e-02, -1.3803e-02, -2.2667e-01]],
[[-1.5892e-01, -1.1207e-01, -1.9106e-01],
[-1.5308e-01, -2.0264e-01, -6.2059e-02],
[-1.5674e-01, -2.5154e-01, 1.0897e-01]],
...,
[[-9.8918e-02, -3.0980e-02, 3.3674e-02],
[-2.0841e-02, 1.6261e-01, 7.0855e-02],
[ 2.7551e-02, 2.1583e-01, 7.7446e-02]],
[[ 3.0838e-02, -1.0673e-01, -1.6609e-01],
[ 6.9302e-02, 1.0135e-01, -3.3467e-02],
[-1.1231e-02, 6.5810e-02, -1.7888e-01]],
[[ 9.9306e-03, -2.0178e-02, 2.8000e-02],
[ 6.0467e-02, 1.6776e-02, -3.2710e-02],
[-7.4629e-02, -4.6681e-02, 1.4501e-03]]],
[[[-4.1287e-02, 5.9342e-03, -9.1519e-02],
[-1.1984e-02, 2.1255e-02, -1.0354e-01],
[ 1.6273e-02, 6.2455e-02, 7.9290e-02]],
[[ 8.1780e-02, -1.0797e-01, -1.4939e-01],
[ 4.2252e-02, -4.2067e-02, 1.9768e-01],
[-1.2965e-01, 6.5521e-02, 9.4705e-02]],
[[-6.7421e-02, -1.3568e-01, -1.1555e-01],
[-7.6964e-02, 2.0623e-02, -1.6203e-01],
[-2.7130e-02, 2.8049e-01, 1.3423e-02]],
...,
[[-1.0558e-01, 8.2344e-03, -1.0613e-01],
[ 5.0182e-02, -1.9723e-01, -2.8878e-01],
[-9.3120e-02, 1.0823e-01, -5.7690e-02]],
[[ 3.0902e-01, -8.8460e-02, -3.0601e-01],
[-8.8197e-02, -3.0515e-01, 9.7461e-03],
[-2.9321e-01, -1.2062e-01, 1.1496e-01]],
[[ 2.4546e-02, 3.3540e-02, 3.4460e-02],
[-4.6374e-02, 3.9135e-02, 6.3110e-02],
[ 3.5946e-02, 4.3615e-03, 1.3563e-02]]],
[[[ 1.7977e-01, 1.5571e-01, 4.9108e-02],
[ 3.3545e-02, -4.3655e-02, -3.3421e-03],
[ 3.6491e-02, -1.6855e-01, -3.1917e-02]],
[[-5.8465e-02, -2.1956e-02, 4.4429e-02],
[ 1.7495e-01, 2.6260e-01, 1.3241e-01],
[ 5.3135e-02, 5.3929e-02, 6.3404e-02]],
[[-2.1868e-01, -4.3288e-01, -1.8874e-01],
[-2.9160e-02, 4.7375e-03, -1.4923e-01],
[ 1.4910e-01, 1.3238e-01, -5.5003e-02]],
...,
[[ 4.9776e-03, -1.7780e-01, -1.9658e-01],
[-5.0282e-02, -6.4640e-02, -1.1419e-02],
[ 1.6357e-01, 1.8752e-02, 2.7642e-02]],
[[-1.4704e-01, -4.6700e-01, -3.1448e-01],
[-1.6129e-01, 7.0736e-02, 3.3586e-02],
[ 7.0179e-02, 2.2433e-01, 7.0851e-02]],
[[-1.3386e-02, 2.8940e-02, 4.3044e-02],
[-3.8452e-02, -3.1423e-02, -8.4808e-02],
[-2.9714e-03, -8.6031e-02, 1.0228e-01]]]], device='cuda:0')),
('conv_layers.12.bias',
tensor([ 0.1307, -0.1241, -0.0187, 0.0867, 0.0434, -0.2133, 0.0392, 0.1379,
0.0228, 0.0251, 0.0103, -0.1062, -0.0595, 0.0962, -0.0945, -0.0301,
-0.0018, 0.0653, -0.0445, 0.0145, 0.1300, -0.0928, -0.0777, 0.1001,
0.0585, 0.0386, -0.0575, 0.2208, -0.1041, 0.1499, 0.0253, -0.0107],
device='cuda:0')),
('fc_layers.0.weight',
tensor([[-0.0244, 0.0267, 0.0270, ..., 0.0157, 0.0193, -0.0142],
[ 0.0087, 0.0270, -0.0063, ..., 0.0058, -0.0175, -0.0231],
[ 0.0022, -0.0609, -0.0227, ..., -0.0505, -0.0256, -0.0400],
...,
[-0.0618, -0.0828, -0.0770, ..., 0.0258, 0.1417, 0.0561],
[ 0.0068, -0.0130, 0.0356, ..., 0.0373, -0.0017, -0.0202],
[ 0.0126, 0.0072, -0.0197, ..., 0.0083, 0.0003, 0.0215]],
device='cuda:0')),
('fc_layers.0.bias',
tensor([ 2.5713e-02, -3.8386e-02, -2.1505e-02, 9.6669e-02, -2.2777e-02,
-3.9201e-02, -1.3875e-02, -7.3476e-03, -5.4979e-03, 8.2606e-02,
9.3751e-02, -2.4733e-02, -8.8775e-02, -6.5881e-02, -9.5692e-02,
-8.8433e-02, 6.6816e-02, -4.0443e-03, -5.4195e-02, -2.4419e-02,
1.2113e-03, 1.4501e-01, 1.2143e-02, -4.7318e-02, 6.1969e-02,
6.6216e-02, 4.5539e-02, 3.5170e-02, -2.9307e-02, -4.0119e-02,
-5.8424e-02, -2.8853e-02, -2.0011e-02, -1.7619e-02, 8.6024e-02,
-3.1481e-02, -9.2497e-02, -3.9325e-02, 9.5656e-02, -3.7236e-02,
-6.0469e-02, -5.2750e-02, 1.6506e-01, 2.1306e-02, 6.6426e-02,
-7.1758e-02, 2.6032e-02, 1.0640e-01, 1.4393e-01, -7.7481e-03,
8.6108e-02, 2.1568e-01, 4.2596e-02, -6.8291e-02, 2.2031e-01,
-2.9327e-02, 5.6808e-02, -3.5776e-02, 2.7174e-02, 3.5673e-02,
-6.0004e-02, -1.5451e-02, -1.0571e-02, 8.0149e-02, -4.0886e-02,
1.4160e-02, 1.9365e-02, 5.2900e-03, -9.0261e-02, -1.0796e-02,
7.1974e-02, -9.5056e-03, -5.4276e-02, -3.9622e-02, -2.1161e-02,
-5.0565e-02, 3.5423e-02, 2.0539e-02, -4.8531e-02, 2.6591e-03,
1.3559e-01, -1.1233e-01, -7.1153e-02, -1.9577e-01, 1.8864e-03,
-6.3811e-02, -5.1104e-02, -6.1472e-02, -2.2709e-02, -1.2151e-02,
-9.4725e-02, 1.8433e-01, 2.6950e-02, 1.9134e-01, 2.7926e-02,
1.9167e-02, 2.6909e-02, 7.8067e-02, 1.8746e-01, 6.0807e-02,
3.4827e-02, 9.6345e-02, -5.1771e-02, -8.1161e-02, 8.6313e-03,
-1.2000e-02, -9.6219e-03, 4.6318e-02, -2.9510e-02, 1.3238e-01,
-1.9524e-02, -9.1719e-02, 6.4760e-02, 9.2120e-02, -5.1569e-02,
4.6162e-02, -4.8101e-02, -6.6976e-02, 2.2363e-01, -6.5884e-02,
-7.2796e-02, -6.0545e-02, -4.6251e-02, -2.2070e-04, -7.5365e-02,
1.2545e-01, -6.2776e-02, -9.4077e-02, 5.6381e-02, -5.5477e-02,
2.7173e-02, 7.9119e-02, 1.3784e-01, -1.6937e-01, -5.8915e-02,
7.0758e-02, 3.2741e-01, -6.5039e-02, -4.4362e-02, 1.5234e-02,
-2.0981e-02, 1.5026e-01, -7.3234e-02, -1.1233e-02, 5.6755e-02,
1.5915e-02, 9.4886e-03, 4.7583e-02, 1.8000e-01, -2.0579e-02,
1.2143e-02, 1.5093e-02, 4.1720e-02, -1.2729e-02, -7.6090e-03,
1.0517e-02, -2.4199e-02, 1.8465e-01, 9.7777e-03, 4.5943e-02,
-9.8300e-04, -9.5582e-02, -2.8716e-03, -3.4332e-02, 1.0202e-02,
-5.6814e-02, -2.4656e-02, 1.9470e-01, -6.1169e-02, -2.1823e-02,
-5.1731e-02, 2.8945e-02, 2.0283e-03, -3.6530e-02, -6.1033e-02,
-2.7156e-02, -6.3256e-02, 3.9931e-02, 2.6222e-01, -4.7225e-02,
9.6369e-02, 2.2263e-02, -3.2335e-02, 1.5835e-01, -9.4725e-03,
-1.4398e-01, 1.6508e-03, -2.1408e-02, -7.5987e-02, 2.8972e-02,
3.3491e-02, 1.5486e-01, 4.4476e-02, -1.6289e-01, -3.4882e-02,
1.0963e-02, -5.0288e-02, -2.4616e-02, 9.8551e-02, -4.2027e-02,
6.3359e-03, 1.1905e-01, 1.0465e-02, -2.4949e-03, -5.3980e-03,
-9.2694e-02, 6.4298e-03, -1.1508e-02, 2.9210e-02, 4.3362e-02,
-9.6143e-02, -1.3210e-02, -1.0902e-02, -4.7295e-02, -2.3830e-02,
1.3856e-02, -1.8069e-02, -6.6170e-03, -6.0313e-02, 7.5020e-02,
-4.8331e-02, 1.5008e-02, 5.1265e-02, -4.4178e-02, 1.1304e-01,
1.1485e-02, -3.7233e-02, -1.6308e-02, -2.9064e-02, 1.9966e-01,
-6.3124e-02, 1.0956e-02, -8.8191e-02, 1.3153e-02, 1.5498e-01,
2.7031e-02, -5.3223e-02, -9.4008e-02, -3.4935e-02, -6.1062e-02,
-6.7642e-02, 2.9127e-01, -5.7849e-02, -4.1842e-02, -4.9554e-02,
1.2501e-01, -5.7460e-02, 1.9408e-03, -8.4992e-02, 8.0606e-02,
1.6167e-02, 9.7728e-03, -4.8930e-02, -4.6566e-02, 3.4159e-02,
2.8649e-02], device='cuda:0')),
('fc_layers.2.weight',
tensor([[ 0.0205, -0.0331, 0.0050, ..., 0.0403, 0.0092, 0.0065],
[ 0.0405, -0.0090, -0.0464, ..., 0.0062, -0.0016, -0.0266],
[ 0.0048, 0.0054, -0.0203, ..., -0.0831, 0.0045, 0.0445],
...,
[ 0.0464, -0.0170, -0.0267, ..., -0.0121, -0.0534, 0.0206],
[ 0.0496, 0.0201, 0.0345, ..., 0.0475, 0.0485, -0.0434],
[ 0.0426, -0.0527, 0.0143, ..., -0.2103, -0.0544, 0.0211]],
device='cuda:0')),
('fc_layers.2.bias',
tensor([-0.1215, 0.0213, 0.1764, 0.1075, 0.0843, -0.0419, -0.1302, -0.1725,
-0.0300, -0.1542], device='cuda:0'))])},
{'ratio': 0.75,
'bias': 64,
'train_losses': [282.3105743432752,
213.08632250689294,
172.38610815585804,
156.59928004945553,
149.01109307785933,
142.81922983030998,
138.83631498568138,
135.27803868398615,
133.22261770171022,
131.0526924144743,
130.4036550043349,
127.10321954636466,
127.14968881026613,
125.73432113633314,
124.10175753607176],
'test_losses': [251.090467116412,
180.53584805656882,
166.43964981565287,
152.40343907767652,
143.11276224080254,
140.6232165261811,
134.98685129016053,
128.20471950605804,
129.02770823590896,
124.64887664364834,
126.07014398948819,
122.82390867495069,
123.45016192454918,
121.71845151863846,
117.97983511756448],
'model_state_dict': OrderedDict([('conv_layers.0.weight',
tensor([[[[-0.1573, 0.0778, 0.1978],
[ 0.0332, 0.0835, 0.2186],
[-0.1509, -0.1409, -0.2103]],
[[-0.1777, 0.1747, 0.3068],
[ 0.1667, -0.0941, 0.1935],
[-0.3072, 0.0800, -0.0913]],
[[-0.1032, 0.0592, 0.1410],
[-0.0175, -0.1714, -0.0040],
[-0.1066, 0.0304, -0.0149]]],
[[[ 0.1519, -0.1282, 0.0511],
[ 0.0637, 0.0271, 0.0488],
[ 0.1422, -0.2401, 0.1696]],
[[ 0.0450, 0.1486, -0.0582],
[-0.3086, -0.1871, 0.2005],
[-0.3121, -0.1822, 0.2438]],
[[ 0.0734, -0.0341, -0.0608],
[ 0.0702, 0.0154, 0.0032],
[-0.0745, -0.1720, 0.1552]]],
[[[-0.0838, -0.2740, -0.1837],
[ 0.1406, -0.0566, 0.0803],
[-0.0081, 0.2596, -0.0660]],
[[ 0.0263, 0.1867, 0.1233],
[ 0.2449, -0.1500, -0.2818],
[ 0.2929, 0.1475, -0.1353]],
[[ 0.1195, 0.1324, -0.0230],
[ 0.1317, -0.1493, -0.2336],
[-0.0812, -0.0443, -0.1370]]],
[[[ 0.1329, 0.2195, -0.0251],
[-0.0158, 0.1050, 0.0726],
[-0.0994, -0.1816, -0.2461]],
[[ 0.2514, 0.0569, 0.1014],
[-0.1232, 0.0367, 0.0390],
[-0.1792, -0.2427, -0.0030]],
[[ 0.1275, 0.1037, 0.1222],
[ 0.1173, -0.1033, 0.0046],
[-0.1273, -0.2270, 0.0986]]],
[[[ 0.0160, -0.1760, -0.1737],
[ 0.0940, 0.1174, -0.1128],
[-0.0905, -0.0124, 0.1512]],
[[-0.1353, -0.1769, -0.1095],
[ 0.0634, 0.2469, 0.0547],
[-0.0021, 0.2786, 0.2533]],
[[-0.0277, -0.2409, -0.0236],
[-0.1711, 0.1150, 0.0707],
[ 0.0304, 0.0773, -0.0782]]],
[[[-0.1869, -0.1425, -0.1499],
[-0.0071, -0.0450, -0.0923],
[ 0.1603, 0.2608, 0.2351]],
[[-0.1821, -0.0195, -0.1987],
[ 0.0873, -0.1876, -0.0352],
[-0.0464, 0.2012, 0.1509]],
[[ 0.0101, -0.0991, 0.0802],
[-0.0616, 0.0592, 0.0651],
[-0.0388, -0.0395, 0.2524]]],
[[[ 0.1764, -0.1016, -0.1194],
[ 0.1056, 0.1245, -0.1977],
[ 0.1424, -0.0317, -0.0978]],
[[ 0.1409, 0.0076, -0.0981],
[ 0.1507, -0.0022, -0.1769],
[ 0.1139, 0.1157, -0.2277]],
[[ 0.1526, 0.1116, -0.0261],
[ 0.0863, -0.0961, 0.1342],
[ 0.0088, -0.1802, -0.1787]]],
[[[-0.1517, -0.0395, 0.2120],
[-0.1757, 0.0335, 0.0377],
[-0.0503, 0.0428, 0.0796]],
[[-0.1638, -0.0702, 0.2729],
[-0.2217, -0.1026, 0.1500],
[-0.1605, -0.0636, 0.2766]],
[[ 0.0811, 0.1100, 0.0548],
[ 0.0181, -0.0863, -0.1054],
[-0.0232, -0.1230, 0.1754]]]], device='cuda:0')),
('conv_layers.0.bias',
tensor([ 0.2315, -0.5714, -0.4021, 0.1660, 0.2827, 0.1461, 0.2445, 0.1513],
device='cuda:0')),
('conv_layers.2.weight',
tensor([[[[-2.4900e-01, -8.9075e-02, -3.1127e-01],
[ 3.6400e-02, 6.7422e-02, -1.0054e-01],
[ 1.8014e-01, 1.3257e-01, -1.8099e-01]],
[[ 5.4885e-02, -2.7508e-01, -4.8643e-02],
[ 1.5805e-02, -1.1825e-01, -2.6722e-01],
[-5.1540e-02, 2.6864e-01, 2.7916e-03]],
[[-2.0567e-02, 5.0791e-02, -2.9063e-01],
[-2.4348e-01, -5.2383e-02, -2.7203e-01],
[-1.6835e-01, 6.4012e-02, -3.9177e-01]],
...,
[[ 1.4842e-01, 8.1555e-02, 1.4725e-02],
[ 4.6726e-02, 4.6714e-02, -1.1435e-01],
[-5.6575e-02, -1.4816e-01, -1.4007e-01]],
[[-1.3085e-01, 1.2574e-01, 1.7687e-01],
[-9.0450e-02, -6.2349e-02, 1.1309e-01],
[-1.5802e-01, 3.3736e-02, 1.5279e-01]],
[[ 1.5237e-01, -1.2907e-01, -2.5960e-01],
[ 2.6492e-01, -1.3068e-02, -2.7536e-01],
[ 4.0924e-02, -7.8733e-02, -3.4171e-01]]],
[[[ 1.4332e-01, -1.2597e-01, -1.9721e-01],
[-1.5502e-01, -1.2917e-01, -2.1747e-01],
[-1.3556e-01, -2.1579e-01, -2.1909e-01]],
[[ 1.7105e-01, 1.4519e-01, 2.0794e-01],
[ 1.4145e-01, -5.4229e-02, -1.0727e-01],
[ 2.1054e-01, -2.9617e-02, -4.0633e-02]],
[[ 1.5133e-02, -1.4892e-02, -6.8367e-02],
[-5.1716e-03, 3.8057e-02, -4.5854e-02],
[-5.1914e-02, 7.9401e-03, -5.3652e-02]],
...,
[[-7.5061e-02, -6.9729e-04, 1.5389e-01],
[ 9.4961e-02, 7.8632e-02, 1.6167e-01],
[ 8.2812e-02, 1.7641e-01, 8.7408e-02]],
[[ 1.0995e-01, 3.9236e-02, 8.9873e-02],
[ 5.8021e-02, -5.0669e-02, -3.2334e-03],
[ 9.3293e-02, -5.2917e-02, 4.9174e-02]],
[[-1.9506e-01, -7.1721e-02, 1.1412e-01],
[-8.7298e-02, 2.3183e-02, 9.7281e-02],
[-2.7346e-01, -8.3730e-02, 1.5157e-01]]],
[[[-9.0648e-02, 2.5956e-04, 7.1992e-02],
[ 3.3305e-02, -9.5576e-02, 1.9193e-03],
[ 5.7457e-03, 6.7979e-03, 2.5870e-03]],
[[-4.4306e-02, 9.2109e-02, -4.5774e-03],
[-4.6264e-02, -2.7651e-02, -4.6059e-02],
[ 5.2954e-03, -3.0941e-02, 1.2771e-01]],
[[ 4.2998e-03, 2.5455e-01, 1.9123e-01],
[-1.1553e-02, 1.0715e-01, -6.4057e-02],
[-7.8716e-03, -5.2301e-02, -1.8997e-01]],
...,
[[-6.2975e-02, -1.1913e-01, 1.1310e-02],
[-3.6983e-01, -1.4696e-01, 9.0147e-02],
[-2.5227e-01, -7.3420e-02, -1.6686e-02]],
[[ 1.4204e-02, 1.8428e-02, 2.0674e-01],
[-1.9104e-01, -1.1041e-01, 7.5094e-02],
[-9.3239e-02, 2.1184e-03, 9.4927e-02]],
[[-7.5948e-02, -9.9759e-02, -3.3186e-02],
[ 1.4282e-01, -9.3450e-02, -2.1511e-01],
[-3.9430e-02, -6.5974e-02, 3.1285e-02]]],
...,
[[[ 1.5303e-01, 3.4511e-02, 1.0100e-01],
[-4.4355e-02, -6.2023e-02, 4.2143e-02],
[ 6.7444e-02, -1.7487e-01, 7.3178e-03]],
[[-1.8479e-01, -3.5368e-01, -2.4677e-01],
[-5.6344e-02, -3.3165e-01, -3.4198e-01],
[-1.5900e-01, -4.6317e-01, -2.2582e-01]],
[[-1.0181e-01, -1.2561e-01, -6.9100e-02],
[-1.5068e-01, -1.3116e-01, -9.8004e-03],
[-2.6830e-02, -1.0349e-01, -1.4150e-01]],
...,
[[-1.6401e-01, -6.0326e-02, -9.2649e-02],
[ 1.2884e-01, -1.3440e-01, -3.3344e-02],
[-1.3642e-01, -2.3318e-02, 7.4720e-02]],
[[-9.7842e-02, 4.1925e-02, 4.9766e-02],
[-4.3930e-02, 9.3536e-03, 9.8605e-02],
[ 7.6573e-04, -8.6310e-02, -2.8361e-02]],
[[ 2.2201e-02, -1.8104e-02, -9.0501e-02],
[ 3.8072e-02, -7.4073e-02, -1.0134e-01],
[ 4.9538e-02, -3.4480e-02, 1.2841e-01]]],
[[[ 1.8243e-02, 1.7479e-01, -2.3812e-02],
[ 6.3072e-02, 1.3801e-01, 2.1169e-01],
[-3.1754e-02, -4.9791e-02, 6.4117e-02]],
[[ 1.0345e-01, 2.0973e-01, 1.2879e-01],
[ 1.3544e-01, 1.6884e-01, 1.7184e-01],
[-1.0320e-01, 2.8532e-02, 3.4088e-01]],
[[ 7.1565e-02, 9.9001e-02, 9.7858e-02],
[ 2.3237e-01, 3.9638e-02, 7.8295e-02],
[ 3.1063e-01, 2.3244e-01, 1.3270e-01]],
...,
[[-6.6723e-03, -4.5081e-02, -1.9124e-02],
[-2.7365e-01, -1.8347e-01, -1.8932e-01],
[ 1.1775e-01, 5.9041e-02, 7.6912e-02]],
[[-6.7779e-02, -3.8539e-02, -1.2247e-01],
[-8.1255e-02, 9.3430e-03, -4.9543e-02],
[-8.2127e-02, -1.7997e-01, -2.8113e-01]],
[[-2.3048e-02, -2.1173e-01, -2.0416e-01],
[-5.0510e-02, -2.6221e-03, 3.3800e-03],
[-5.9892e-02, -4.5530e-02, -1.1417e-01]]],
[[[ 7.8012e-02, -3.9202e-02, 2.8802e-03],
[ 1.5621e-01, 8.6975e-02, 1.2913e-01],
[ 8.7982e-02, 3.1259e-02, -1.1580e-01]],
[[ 2.3454e-01, -1.2781e-01, 6.2833e-02],
[-4.4587e-02, -7.4648e-02, 4.9830e-02],
[-3.0521e-02, 4.0877e-02, 1.7862e-01]],
[[ 4.2563e-02, 2.8327e-01, 2.0265e-01],
[ 2.4789e-01, 8.7264e-02, -3.3841e-02],
[ 1.1799e-01, -1.8770e-01, -1.5149e-01]],
...,
[[ 1.3763e-01, 1.1367e-01, -7.9679e-02],
[ 2.1594e-02, -3.0514e-02, 2.4222e-02],
[-1.8955e-01, -5.3728e-02, 1.4748e-01]],
[[-2.4379e-01, 1.7811e-01, 2.4739e-01],
[-2.0946e-01, 1.4183e-01, 1.1680e-01],
[-2.0846e-01, 6.7804e-02, -5.5328e-02]],
[[ 3.1018e-01, 8.4381e-02, -4.0621e-02],
[ 1.2357e-01, 3.1538e-02, -9.8280e-02],
[ 1.0402e-01, -6.4343e-03, -1.0703e-01]]]], device='cuda:0')),
('conv_layers.2.bias',
tensor([ 0.2114, -0.3448, -0.1963, 0.1068, 0.1171, 0.2453, 0.0732, 0.1729,
0.1998, 0.2240, 0.2385, -0.3409, 0.0687, 0.2356, -0.1813, -0.1791],
device='cuda:0')),
('conv_layers.5.weight',
tensor([[[[-2.7686e-02, -8.1531e-02, -5.4883e-02],
[-5.3868e-02, -8.9294e-02, -6.6121e-02],
[-1.1023e-01, -6.7386e-02, -1.0014e-01]],
[[-1.4094e-02, -5.5835e-02, -8.5540e-02],
[-1.5191e-01, -6.6336e-02, -4.8015e-02],
[-1.5898e-01, -7.4233e-02, -1.5614e-01]],
[[-4.5635e-02, -9.4256e-02, -1.0289e-01],
[-1.0211e-01, -6.7339e-02, -4.0209e-02],
[ 6.6349e-02, 3.0833e-03, -1.9597e-02]],
...,
[[-7.9104e-02, -9.8473e-02, 3.9324e-02],
[-2.4799e-02, -2.4310e-02, 4.9060e-02],
[-1.8020e-03, -4.5408e-02, -8.5803e-02]],
[[-1.6403e-03, -6.9994e-02, -8.9894e-02],
[-4.0532e-02, 3.7427e-02, 3.1319e-02],
[ 1.1899e-01, -7.5652e-02, 1.0272e-02]],
[[-9.2661e-02, -6.6671e-02, -1.2777e-01],
[-7.4206e-02, -3.6820e-02, -1.7230e-02],
[ 7.4691e-02, 5.8660e-02, 2.2312e-02]]],
[[[-3.1457e-02, 4.0036e-03, 3.6620e-02],
[ 1.8935e-01, -2.7336e-01, -2.3625e-02],
[ 2.1002e-01, 1.4286e-01, -5.1610e-02]],
[[ 5.3985e-03, -9.6493e-02, 1.2152e-01],
[-1.2198e-01, -1.9067e-01, -3.9065e-02],
[-1.4361e-01, -1.4489e-01, -2.0274e-01]],
[[ 3.5409e-02, 9.3345e-02, 1.2649e-01],
[-1.2102e-01, -3.7764e-02, 1.7809e-01],
[-3.9514e-02, -1.0465e-01, -2.1694e-02]],
...,
[[-3.7663e-02, 9.2928e-02, 1.1960e-01],
[-6.1210e-02, 1.4104e-01, 6.0940e-02],
[-2.3338e-01, -2.4123e-01, -3.5760e-02]],
[[-7.9435e-02, -2.5208e-01, -1.7586e-01],
[-8.0259e-02, -9.3520e-03, -1.7172e-01],
[ 4.2300e-01, 1.1735e-02, -8.6905e-02]],
[[ 3.8289e-02, 1.0771e-01, 1.4149e-01],
[-8.5833e-02, -3.2704e-01, -6.9112e-02],
[ 1.6324e-02, -7.4929e-02, -4.3653e-02]]],
[[[ 4.7991e-02, -5.1258e-02, -2.7294e-01],
[ 4.3082e-02, 1.5337e-01, -3.1384e-01],
[-2.7141e-02, 1.4213e-01, -7.3455e-03]],
[[-2.4278e-02, -1.7937e-02, 4.5510e-02],
[-7.7801e-03, 2.4701e-02, 2.9694e-02],
[ 5.7538e-02, 6.0416e-02, 2.2607e-02]],
[[-5.9553e-02, -1.0765e-01, 1.4422e-01],
[ 7.1046e-02, -4.8890e-02, -1.6027e-01],
[ 2.0324e-02, 9.8998e-03, -1.8876e-02]],
...,
[[-4.7036e-02, -1.3826e-01, 5.3728e-02],
[-3.4092e-02, 5.8935e-02, 9.6167e-02],
[-2.4479e-02, -8.1073e-02, 9.7600e-03]],
[[ 1.4228e-02, -3.8551e-02, -3.4650e-02],
[ 1.9932e-01, 1.2386e-03, -8.7455e-02],
[ 2.5036e-02, 8.8541e-02, -1.0499e-01]],
[[ 2.0058e-02, -4.8483e-02, 4.2179e-02],
[ 2.7298e-02, 6.1432e-02, -2.8495e-01],
[-6.8329e-02, 1.4000e-01, -3.5746e-02]]],
...,
[[[-7.2705e-02, -9.9587e-02, -2.9261e-02],
[-7.0658e-03, 5.8848e-02, -2.4265e-03],
[-8.8895e-03, -7.0835e-02, -5.9389e-02]],
[[-1.0294e-03, -5.7598e-02, -9.5946e-02],
[-2.6162e-03, 1.4981e-02, 1.0643e-02],
[-9.3985e-03, -3.5405e-02, 2.3284e-02]],
[[-5.5155e-02, 4.3995e-02, 7.6693e-03],
[-9.3182e-02, -2.1084e-02, 7.1782e-02],
[-2.6030e-02, -1.7328e-03, 6.8218e-02]],
...,
[[-5.5837e-02, -8.2052e-02, -5.7057e-02],
[-3.9408e-02, 6.3207e-02, 1.8989e-02],
[-4.0952e-02, -1.7897e-03, -7.4401e-03]],
[[ 3.2322e-02, 2.8738e-03, 1.0283e-02],
[-6.6526e-02, 2.0484e-02, -9.5520e-02],
[ 6.4478e-02, -5.8631e-02, -3.4761e-02]],
[[-3.0080e-02, -1.2278e-01, -8.1370e-02],
[ 4.2813e-02, -1.2671e-01, -5.6456e-04],
[-7.3059e-03, -1.8798e-02, -9.7877e-03]]],
[[[-2.1758e-02, -9.4714e-02, -3.5430e-02],
[-8.1754e-02, 5.6395e-02, -6.2474e-02],
[-5.8845e-02, -8.9067e-02, -6.9078e-02]],
[[-4.5242e-02, -1.8703e-02, -6.3318e-02],
[-5.3725e-02, -8.2329e-02, -7.5969e-02],
[ 2.5200e-02, -7.0097e-02, -1.2607e-01]],
[[-1.5602e-02, -2.2029e-02, -1.9585e-02],
[-4.2807e-02, -2.3352e-02, -8.4931e-05],
[ 3.5948e-02, -6.9533e-02, -6.2785e-02]],
...,
[[ 2.8665e-03, 2.6017e-02, 2.4539e-02],
[-8.9852e-03, 4.5744e-02, -1.8748e-02],
[-1.0820e-01, -3.5562e-02, -2.2236e-02]],
[[-2.0839e-02, -4.8490e-02, 2.6426e-02],
[-1.6717e-02, -2.5362e-02, -6.5632e-02],
[ 1.0201e-02, 2.0727e-02, 3.1012e-02]],
[[ 5.6758e-02, 4.0613e-02, 4.1986e-02],
[-9.2043e-02, -1.2285e-01, -1.4120e-01],
[-1.0734e-01, -1.0671e-01, -2.5254e-02]]],
[[[ 3.9130e-03, -1.4294e-01, -2.1717e-01],
[-1.5753e-02, -6.4105e-02, -1.2506e-01],
[ 1.5304e-01, 2.1619e-01, 1.6827e-01]],
[[-3.7769e-02, -2.0568e-02, 2.3031e-01],
[-1.4728e-01, -1.4570e-01, -3.1767e-02],
[-2.4998e-01, -2.2224e-01, -8.9812e-02]],
[[-2.4173e-02, 6.8706e-02, -4.5938e-03],
[-8.0142e-02, 5.5148e-03, -5.4249e-02],
[-7.0811e-02, -9.8586e-03, -6.4052e-02]],
...,
[[ 8.8885e-03, 7.4779e-02, 1.6386e-01],
[-7.4183e-02, 4.2529e-02, -2.1872e-03],
[-1.7375e-01, -7.8266e-02, -3.2677e-02]],
[[-1.1752e-01, -1.6697e-01, -3.7119e-01],
[ 2.2706e-02, 8.2948e-02, -6.6884e-02],
[ 2.4322e-01, 3.1789e-01, 9.1727e-02]],
[[ 3.8995e-02, -1.2212e-01, -8.4198e-02],
[-2.6290e-02, -1.4698e-01, -1.8993e-01],
[-7.7743e-02, -9.8433e-02, -1.0353e-01]]]], device='cuda:0')),
('conv_layers.5.bias',
tensor([-0.0069, -0.0460, -0.0556, 0.1207, -0.1437, 0.0163, -0.1054, -0.0390,
0.0012, 0.1218, 0.1670, 0.0831, -0.0931, -0.0880, 0.0407, -0.0268,
0.0231, 0.0362, 0.0688, -0.0816, -0.1058, 0.2334, 0.2097, 0.0099,
-0.0444, -0.1955, 0.1844, 0.1695, -0.0183, -0.1051, -0.0871, 0.1265],
device='cuda:0')),
('conv_layers.7.weight',
tensor([[[[ 9.6450e-03, 3.5787e-02, -3.9716e-02],
[-1.5686e-02, -4.1097e-02, 5.0835e-02],
[ 1.6853e-02, -1.1446e-01, 8.6883e-02]],
[[-5.9823e-02, 2.3964e-03, -1.2605e-01],
[-6.1802e-02, -2.0689e-01, -3.7230e-01],
[-1.6754e-01, -1.9923e-01, 4.1851e-02]],
[[ 8.1747e-02, -5.6411e-04, 2.7205e-02],
[ 1.1285e-01, -1.5886e-01, -2.5255e-01],
[-5.6519e-03, -2.0531e-01, -1.4232e-01]],
...,
[[-2.4986e-02, -4.8970e-02, 5.4739e-02],
[ 5.6023e-02, -4.6577e-02, 1.6819e-02],
[-5.7417e-02, 5.0320e-02, 5.1012e-02]],
[[ 7.3986e-02, 3.9157e-02, -1.6041e-02],
[ 3.3317e-02, -1.0846e-02, -5.4661e-02],
[-1.5508e-02, 4.5267e-02, 2.3121e-02]],
[[ 4.3177e-02, -8.0183e-02, -1.7127e-01],
[ 1.3636e-01, 6.5510e-03, -2.1977e-01],
[ 1.1912e-01, -1.4683e-01, -3.2237e-01]]],
[[[ 1.5071e-02, 5.3144e-02, -3.2213e-02],
[ 1.1195e-02, -7.7305e-02, -4.0811e-02],
[-6.6841e-02, 8.5220e-02, 3.7329e-02]],
[[ 1.1969e-02, -2.1430e-01, -1.6687e-02],
[ 1.3021e-01, -1.3382e-02, -1.9838e-01],
[ 1.1128e-01, 2.4257e-01, 1.9423e-02]],
[[ 4.5676e-03, 5.1213e-02, 6.6661e-02],
[-1.0351e-01, -1.9778e-01, 9.6011e-02],
[ 1.0676e-01, -8.6348e-02, -9.4180e-02]],
...,
[[ 1.0443e-01, 1.6819e-02, -1.1096e-02],
[ 5.2381e-02, -6.0012e-02, 4.7568e-02],
[-5.2865e-02, 2.6191e-02, 5.0750e-02]],
[[ 2.1532e-02, 5.5942e-02, 2.6828e-02],
[-1.5828e-02, -6.2559e-02, -6.5172e-03],
[ 6.4682e-03, -1.9744e-02, 1.7167e-02]],
[[-7.9911e-02, -1.0550e-01, 2.5228e-02],
[ 1.5082e-01, -2.7180e-01, -1.5723e-02],
[-3.4565e-02, -7.5809e-03, -7.7978e-02]]],
[[[-1.5994e-02, 6.2364e-02, -5.0388e-02],
[ 3.8119e-03, -2.2616e-02, 4.3696e-02],
[ 4.5019e-02, -4.3787e-02, 3.0538e-02]],
[[-1.0580e-01, -9.7466e-03, -1.9565e-01],
[ 2.8096e-02, 1.7715e-01, 1.4482e-01],
[ 7.0094e-02, -4.6936e-02, 9.2343e-02]],
[[-2.3564e-01, 8.2179e-03, -7.7082e-03],
[-1.5813e-01, 3.7938e-03, -3.4309e-02],
[-1.4026e-01, -3.1599e-02, 3.2899e-02]],
...,
[[ 3.1918e-02, -4.9609e-02, -2.3987e-02],
[-2.4882e-02, 1.9603e-02, -3.7111e-02],
[ 2.5075e-02, 2.6407e-02, -1.9527e-02]],
[[ 4.9861e-02, 2.8368e-02, 2.6219e-02],
[-4.2317e-02, -1.1589e-02, -3.0936e-02],
[-4.1466e-02, 4.9057e-02, -5.8585e-02]],
[[-1.9136e-01, -1.4080e-01, 7.1116e-02],
[ 6.3645e-02, 3.8156e-02, 3.1311e-02],
[-1.1172e-02, 1.1070e-01, 1.4295e-01]]],
...,
[[[-7.6480e-03, 2.0390e-02, -9.5574e-02],
[ 1.7768e-02, -6.6582e-02, 1.4549e-02],
[ 2.1102e-02, 3.3675e-02, 8.3100e-02]],
[[ 1.4286e-01, -2.9711e-02, -5.5420e-02],
[ 8.9553e-02, 1.5575e-01, -1.3098e-02],
[-5.6687e-02, 7.1617e-02, 8.8474e-02]],
[[ 2.9569e-02, -1.6627e-01, -5.0709e-02],
[ 8.0424e-02, 1.0349e-02, -2.1192e-01],
[ 9.3591e-02, 1.6058e-01, -1.1157e-02]],
...,
[[ 4.7295e-02, -9.9361e-02, -3.5793e-02],
[ 5.0269e-03, 1.2662e-02, 3.4789e-03],
[-4.8780e-02, -4.6051e-02, -2.7032e-02]],
[[-8.1427e-03, -2.7926e-04, -2.9599e-02],
[ 4.0326e-05, 7.5524e-02, 5.4548e-02],
[-3.6798e-02, 6.2050e-03, -5.6940e-02]],
[[ 1.4798e-01, 1.2172e-01, -2.0924e-01],
[ 1.4504e-01, 1.5708e-01, -9.5493e-02],
[ 7.7324e-02, 1.2161e-01, 1.3594e-01]]],
[[[-1.0465e-01, -1.5024e-01, 2.1257e-02],
[-2.6249e-02, -6.1420e-02, -9.9967e-02],
[ 4.2542e-03, 1.0779e-02, -3.7315e-02]],
[[ 8.1589e-02, 1.2304e-01, -1.1126e-02],
[-6.8376e-02, -6.5697e-02, -4.7052e-02],
[-4.5590e-02, -7.9343e-02, -2.0054e-02]],
[[-3.0168e-02, 1.0237e-02, 2.1220e-02],
[ 1.1491e-01, 1.5885e-01, 3.9384e-02],
[ 1.2410e-01, 1.3854e-01, 7.8512e-02]],
...,
[[ 2.0573e-02, 3.3517e-02, -5.8498e-02],
[ 2.8672e-02, 4.0241e-02, -2.6419e-02],
[-5.0943e-02, -2.3053e-02, -1.8649e-02]],
[[-5.4809e-02, 4.2193e-02, -2.2309e-02],
[ 2.6034e-02, 4.1843e-02, -3.8648e-02],
[-5.0864e-02, 6.3618e-02, -5.4494e-02]],
[[-1.1623e-01, -2.1700e-02, -3.7161e-02],
[-1.5946e-01, -1.0767e-01, -2.3403e-02],
[ 7.6454e-02, 3.3854e-02, 7.7329e-02]]],
[[[-1.8224e-02, -4.3888e-02, -2.5522e-03],
[ 5.8678e-02, 7.9547e-02, 5.4023e-02],
[ 8.2712e-02, 1.0044e-01, 5.7825e-02]],
[[-1.9346e-01, -3.0206e-01, -1.9946e-01],
[-1.1963e-01, -2.3704e-01, -1.9921e-01],
[ 2.1518e-02, -2.4062e-02, 4.2463e-02]],
[[-8.8740e-02, -2.8743e-02, -6.5396e-02],
[-1.4110e-02, -5.8128e-03, -1.0460e-01],
[-6.3335e-02, -2.7715e-02, -1.2996e-01]],
...,
[[ 6.0507e-02, 3.4048e-02, -9.6598e-02],
[ 4.5318e-02, 3.6103e-02, 1.4335e-02],
[ 1.8288e-02, 8.2743e-02, -7.3099e-02]],
[[ 5.2362e-02, 2.8558e-03, 1.1466e-02],
[ 5.3704e-02, 3.7266e-02, -5.8319e-03],
[-6.2152e-02, -1.1649e-03, 3.5440e-02]],
[[ 1.2237e-02, -8.3513e-02, -9.8718e-02],
[-1.7154e-01, -2.4988e-01, -1.5259e-01],
[-1.9044e-02, 6.4801e-02, -5.8300e-02]]]], device='cuda:0')),
('conv_layers.7.bias',
tensor([-0.0829, -0.0763, -0.0418, 0.0097, 0.0708, 0.2283, 0.0362, -0.0657,
0.0203, -0.0782, -0.0629, 0.0836, -0.0204, 0.1406, -0.0427, -0.0164,
0.0553, -0.2661, -0.0967, -0.0582, -0.0266, 0.1811, -0.0561, 0.0691,
0.0733, 0.0006, -0.0897, -0.1155, -0.0687, 0.0521, -0.1414, -0.0134],
device='cuda:0')),
('conv_layers.10.weight',
tensor([[[[-4.8763e-03, -2.8588e-01, 2.8125e-02],
[ 1.6322e-01, -2.0118e-01, -1.6916e-01],
[ 7.6919e-02, 8.7453e-02, -6.0565e-02]],
[[ 2.3503e-01, 1.5460e-01, 1.6314e-02],
[ 1.6036e-01, 5.4105e-02, -9.8163e-02],
[ 1.1971e-01, -1.9599e-01, 5.0030e-02]],
[[-1.9185e-01, -1.0529e-01, -1.4240e-02],
[ 1.1149e-01, -6.4777e-02, -6.5297e-02],
[ 1.8572e-02, -1.2486e-01, -1.0997e-01]],
...,
[[ 2.0710e-01, 2.8632e-01, 1.3564e-01],
[ 2.0200e-01, 1.9793e-01, 3.9938e-02],
[ 4.5159e-02, -9.2621e-02, -1.0851e-01]],
[[-1.2179e-01, -1.0971e-01, -7.0411e-03],
[-1.7034e-02, 6.1612e-02, 1.2470e-01],
[-1.3044e-01, 2.5521e-03, 2.4034e-02]],
[[-1.2494e-01, -2.1995e-01, -6.8818e-03],
[-7.0643e-03, -1.2844e-01, 4.3875e-02],
[ 9.8302e-02, 7.5985e-02, 1.7882e-01]]],
[[[ 2.2513e-03, -9.1816e-03, 8.3793e-02],
[-2.1186e-01, -1.1492e-01, 5.1458e-02],
[-3.3749e-01, -1.4829e-01, -2.8825e-02]],
[[-4.6247e-02, -1.1625e-01, -7.7188e-02],
[-7.9753e-02, 1.2402e-01, -4.3056e-02],
[-2.7463e-02, 1.4000e-01, 1.4501e-01]],
[[ 1.1953e-01, 6.5711e-02, -4.3809e-02],
[-8.6959e-02, -7.2712e-02, -2.3988e-02],
[ 2.3301e-01, 2.0996e-01, 9.8399e-02]],
...,
[[-4.5590e-02, -1.7902e-01, -4.8327e-02],
[-3.7112e-02, -6.8452e-04, 2.0066e-01],
[ 1.4533e-02, 6.5934e-02, 1.5876e-02]],
[[ 1.1542e-01, 1.3303e-02, 6.2279e-02],
[-3.6110e-02, 1.9590e-02, -5.2805e-02],
[ 2.5030e-02, -9.3064e-02, -2.4678e-02]],
[[ 1.1273e-01, -2.8319e-02, -1.5582e-01],
[ 1.5322e-01, -2.2997e-02, -8.0321e-02],
[ 4.7962e-02, 5.0308e-02, 5.5790e-02]]],
[[[-3.3453e-02, -1.4216e-01, -6.7920e-02],
[-5.0272e-02, -1.0264e-01, -2.5485e-01],
[ 1.0240e-01, 1.2793e-02, 1.3519e-01]],
[[ 1.0278e-01, 8.6935e-02, 1.7111e-01],
[ 5.2308e-02, 9.1881e-02, 1.1837e-01],
[ 7.5983e-02, 8.6847e-02, -3.4328e-02]],
[[ 6.2811e-02, 5.0537e-02, 5.6583e-02],
[ 9.0131e-02, -1.4990e-01, -1.3903e-01],
[ 1.5199e-02, 7.6829e-02, 5.3415e-02]],
...,
[[ 8.2243e-02, 1.8124e-01, 2.0806e-01],
[-2.2279e-02, 3.9548e-02, 8.3337e-02],
[ 3.4885e-02, 1.0958e-01, 8.7924e-03]],
[[ 1.2127e-01, -8.5603e-02, -2.5983e-01],
[-7.8896e-02, -3.0717e-02, -1.1591e-01],
[-9.4648e-02, 7.7694e-02, -1.7339e-01]],
[[-2.1760e-01, -1.7907e-02, 4.3645e-02],
[-1.9001e-01, -9.3865e-02, -1.6544e-01],
[ 3.0187e-02, -5.7704e-03, 8.3727e-02]]],
...,
[[[-6.8117e-02, 5.0093e-02, -3.7460e-03],
[ 2.4263e-01, 7.2747e-02, 1.3963e-01],
[ 1.1138e-01, 7.7363e-02, 1.0632e-01]],
[[ 7.4664e-02, 1.2076e-01, 7.2254e-02],
[ 1.0313e-01, 1.9495e-01, 2.7438e-02],
[ 3.8359e-02, 1.8452e-01, 1.2678e-01]],
[[-1.2454e-01, -2.7893e-02, -4.8069e-02],
[-8.9141e-02, 7.1455e-02, 4.5898e-02],
[-6.0710e-02, 2.9877e-02, -7.0943e-02]],
...,
[[-1.0030e-01, 9.5921e-02, 1.4683e-01],
[ 1.5019e-02, 2.6460e-01, 6.8473e-02],
[-7.4609e-02, 1.6088e-01, 1.5674e-03]],
[[-1.7942e-02, 6.1068e-02, -1.0227e-01],
[ 4.1401e-02, -9.4385e-02, -1.4693e-01],
[ 1.3247e-02, -9.3659e-03, -8.1683e-02]],
[[ 1.0433e-01, 1.6361e-02, 1.9279e-01],
[-2.9130e-02, -1.8285e-01, 6.7118e-02],
[-6.6057e-02, -2.1653e-01, 2.3346e-03]]],
[[[-1.4601e-01, 5.9782e-02, -1.2498e-01],
[ 1.9106e-02, 8.6248e-02, 2.9300e-01],
[ 9.1838e-02, 5.9028e-02, 2.6008e-01]],
[[-5.1146e-02, 3.1552e-02, 1.6901e-01],
[-1.2986e-01, -4.2218e-02, 1.2390e-01],
[-1.2110e-01, -1.7413e-02, 1.9534e-02]],
[[-3.8217e-02, -1.2222e-02, 3.1557e-02],
[-2.1617e-01, -3.3844e-02, -2.6546e-02],
[-1.1900e-01, -5.8701e-02, 2.2115e-02]],
...,
[[ 4.0210e-02, 3.6365e-02, 2.0597e-02],
[ 4.0915e-02, -4.5964e-02, 7.3233e-02],
[ 1.3395e-01, 3.2963e-02, 2.4542e-02]],
[[-2.8177e-02, -3.8329e-01, -6.4999e-02],
[ 3.4604e-02, -3.8288e-01, -9.3142e-03],
[ 1.2232e-02, -7.6287e-02, 1.1116e-01]],
[[-1.7996e-02, 4.9244e-02, 1.2605e-01],
[-2.5994e-02, 8.8169e-02, 1.2349e-01],
[ 7.7882e-03, -3.3216e-02, -6.4906e-02]]],
[[[ 4.0790e-02, -1.6477e-01, 4.6813e-02],
[ 7.0206e-02, -4.7300e-02, 5.9213e-02],
[-5.5576e-02, -1.8204e-01, 1.1596e-01]],
[[ 9.1296e-02, -1.1657e-01, 5.8085e-03],
[-8.0310e-02, -1.1901e-01, 7.4708e-02],
[-9.0616e-02, 1.6634e-01, 2.3075e-01]],
[[-2.2712e-02, -1.1163e-01, -1.0968e-01],
[ 8.4196e-02, 8.5122e-02, -1.2577e-01],
[ 1.3023e-01, 1.8335e-01, -2.4790e-02]],
...,
[[ 1.8741e-02, -3.8670e-02, -4.2115e-02],
[ 5.5534e-03, 6.9352e-02, 7.3746e-02],
[-1.4021e-02, 8.7032e-02, 1.4793e-01]],
[[ 1.9442e-01, 1.0834e-01, 1.7724e-01],
[-4.5498e-03, 1.1541e-01, -8.5591e-02],
[ 5.4705e-03, -2.9766e-02, 1.3180e-01]],
[[-1.1596e-02, 1.5530e-01, -3.6345e-03],
[-1.5581e-02, -3.4425e-05, 5.3068e-02],
[-6.1299e-02, -7.5575e-02, -2.7796e-02]]]], device='cuda:0')),
('conv_layers.10.bias',
tensor([ 0.3051, -0.0140, 0.1727, -0.1113, 0.0728, -0.0676, 0.0779, -0.0136,
0.0139, -0.0949, -0.0927, 0.0667, -0.0858, -0.1205, -0.1606, 0.2338,
-0.1123, -0.0723, 0.0252, 0.1330, 0.2144, 0.0346, 0.0475, 0.1592,
0.2220, 0.1224, -0.1283, -0.1423, 0.0753, -0.0077, -0.0090, 0.1823],
device='cuda:0')),
('conv_layers.12.weight',
tensor([[[[-1.2252e-01, -6.0800e-02, -2.0181e-01],
[-3.2614e-02, 1.4447e-01, -1.3137e-02],
[ 8.0725e-02, 1.1720e-01, -1.2781e-01]],
[[-2.8414e-01, -2.2645e-01, -1.8305e-01],
[-5.5395e-02, -1.1780e-03, 4.6005e-02],
[-1.4537e-01, -1.8895e-01, -7.9002e-02]],
[[-1.2473e-02, -8.5815e-02, -1.2106e-01],
[ 4.3351e-02, -9.4617e-02, -4.1642e-03],
[-5.6545e-02, -1.9216e-01, -1.2949e-01]],
...,
[[ 4.7241e-02, -1.5338e-01, -2.5422e-01],
[ 9.0465e-02, -1.7224e-01, -9.5400e-02],
[-2.6880e-02, -2.2452e-01, -1.8666e-01]],
[[ 3.9427e-02, -2.8217e-02, 8.3242e-03],
[-4.2257e-02, -1.9139e-02, 1.7200e-02],
[-6.6810e-02, 1.8090e-02, -1.2429e-01]],
[[ 2.6065e-01, 2.8317e-02, -1.0948e-01],
[-1.0738e-01, -2.0845e-01, -2.0503e-01],
[-1.1861e-01, -1.9718e-01, -1.5976e-01]]],
[[[-1.2099e-01, -1.8300e-03, -5.9278e-02],
[-6.3114e-02, -7.8971e-02, 9.9564e-02],
[-2.1316e-01, -1.6137e-02, 4.8711e-02]],
[[ 1.0081e-01, -2.1790e-01, 2.4619e-02],
[-2.0954e-02, 3.8692e-03, 1.5579e-01],
[-4.7209e-02, -1.6994e-01, -1.1999e-01]],
[[-1.5041e-01, -7.9747e-02, 1.2214e-01],
[-2.8069e-01, 9.9540e-02, 7.9956e-03],
[-6.5677e-02, 2.3696e-01, 5.7232e-03]],
...,
[[-1.7447e-01, -3.7160e-01, -9.3113e-02],
[-1.0412e-01, -3.8574e-02, -1.0791e-01],
[ 1.1166e-01, 2.2002e-01, 2.9536e-02]],
[[-1.1629e-01, -8.3602e-02, 1.5802e-02],
[ 1.0714e-02, -1.4902e-01, -2.5317e-01],
[ 4.1658e-02, 1.7795e-02, 1.1709e-02]],
[[-2.8980e-02, 3.3602e-02, 6.8616e-03],
[ 7.5122e-02, 1.4940e-01, 1.1191e-01],
[ 4.9699e-04, 1.5019e-01, 1.1049e-01]]],
[[[-4.0664e-02, -1.2607e-01, 3.5816e-02],
[ 5.4645e-02, 2.8940e-02, 1.4006e-01],
[ 1.4487e-02, -6.0008e-02, -7.8013e-02]],
[[ 5.5773e-02, -1.4095e-02, 1.4715e-02],
[ 4.8455e-02, -5.7471e-02, -1.7442e-01],
[ 5.2410e-02, -2.7477e-02, -7.1401e-02]],
[[-1.1119e-01, 6.0967e-02, 2.7795e-01],
[-2.3294e-01, -1.9716e-01, 1.4873e-01],
[-2.5159e-01, 6.4109e-02, 1.1108e-01]],
...,
[[-1.2391e-01, -1.9620e-01, 1.7292e-01],
[-1.6168e-01, -9.9732e-02, 1.1005e-01],
[-1.3376e-01, 4.2675e-02, 2.2897e-01]],
[[ 8.5847e-02, -3.6293e-02, 7.2221e-02],
[ 4.9192e-02, -1.2367e-02, 1.4788e-01],
[ 3.8123e-02, 1.9096e-02, 1.7840e-01]],
[[ 1.3176e-01, -5.5210e-02, -4.5131e-02],
[ 7.0659e-02, -7.9989e-02, 4.7412e-02],
[-1.4791e-01, -1.9840e-01, -2.8646e-02]]],
...,
[[[-2.0557e-02, -2.2105e-01, -1.3611e-01],
[-7.2543e-02, 3.7831e-02, -9.1344e-02],
[ 6.7688e-02, 4.8227e-02, 2.2447e-02]],
[[-3.4498e-02, -8.4302e-02, -2.5571e-02],
[ 4.3986e-02, 6.0244e-02, -1.3645e-01],
[ 1.5978e-01, -9.3473e-02, -1.0118e-01]],
[[-2.7550e-01, -1.2159e-01, 6.7701e-02],
[-9.8232e-02, 3.7136e-02, -3.8575e-02],
[-6.0952e-02, -2.4417e-02, 3.1334e-01]],
...,
[[-6.9121e-02, -1.5683e-01, -6.6386e-02],
[ 6.1069e-02, -1.8817e-04, 8.7937e-02],
[-1.4118e-02, -4.3398e-02, -1.2796e-01]],
[[ 4.7683e-02, -1.3689e-01, -1.4183e-01],
[-1.9393e-03, 1.1427e-01, 4.1387e-03],
[ 5.3644e-02, 2.1444e-01, 3.2522e-02]],
[[ 6.6576e-02, -2.8029e-03, -6.1626e-02],
[ 1.3028e-01, 1.2281e-01, 9.5093e-02],
[-8.3654e-02, -7.9005e-02, -2.6559e-01]]],
[[[-3.5960e-01, -3.1618e-01, -3.8321e-01],
[-3.1018e-01, -3.8502e-01, -3.2862e-01],
[-2.2500e-01, -1.0425e-01, 2.4933e-01]],
[[-1.1209e-01, 1.6407e-02, -7.4722e-02],
[-1.0617e-01, -1.3866e-01, -1.4084e-01],
[-2.0275e-01, 2.0524e-03, -6.4584e-02]],
[[-7.0959e-02, -6.8066e-03, 4.4942e-03],
[-9.3635e-02, -8.1371e-03, 8.9391e-02],
[-3.4841e-02, 8.5070e-02, 7.4300e-02]],
...,
[[-1.6291e-01, -1.9277e-01, -4.2302e-02],
[-1.3033e-01, -2.0581e-01, 3.1694e-02],
[-9.3216e-02, 1.7538e-02, 1.2731e-01]],
[[-1.9834e-01, -1.1634e-01, 9.0010e-02],
[-7.2482e-02, 5.1883e-02, 1.7801e-01],
[-3.6766e-01, -1.4122e-01, 3.1167e-02]],
[[ 2.1799e-01, -7.1803e-02, -2.4150e-02],
[ 1.0952e-01, -1.4743e-01, -1.3650e-01],
[-2.0993e-01, -2.3419e-01, -1.9311e-01]]],
[[[ 1.0818e-01, 8.4627e-03, 1.3339e-01],
[-6.1825e-02, 7.7443e-02, 1.0514e-01],
[-2.2853e-01, -8.5882e-02, 8.1323e-02]],
[[ 1.1289e-01, 4.6977e-03, 4.3440e-02],
[-1.6647e-01, -2.2642e-01, -4.5278e-02],
[-6.9513e-02, -7.2843e-02, -5.2722e-02]],
[[-1.4692e-01, -4.5427e-02, 9.0759e-02],
[-3.9310e-02, 7.7809e-02, 8.7464e-02],
[ 2.0772e-01, 1.6934e-02, -1.5407e-01]],
...,
[[-4.7281e-02, 8.8432e-02, -1.2916e-02],
[ 5.3618e-03, 5.6413e-03, 1.7604e-01],
[ 5.0926e-02, -3.3531e-02, 3.6575e-03]],
[[-1.8066e-02, -1.2062e-01, -3.3258e-01],
[-7.7678e-02, -1.0679e-01, -1.5224e-01],
[-1.1572e-01, -2.0773e-01, -2.0576e-01]],
[[ 7.4232e-02, 2.1381e-01, 6.2227e-02],
[-3.2710e-02, 5.5009e-02, 7.9276e-02],
[-1.4546e-01, 8.3403e-02, 2.6440e-02]]]], device='cuda:0')),
('conv_layers.12.bias',
tensor([ 0.1536, 0.0650, 0.0855, -0.0285, -0.0048, -0.0535, 0.2263, 0.2687,
0.0396, 0.0600, 0.0725, -0.1152, 0.1430, -0.0503, 0.1728, -0.0074,
-0.0390, 0.1505, 0.0317, -0.0279, 0.1782, 0.1019, 0.1355, -0.0452,
-0.0332, 0.0482, 0.1508, -0.1291, 0.0569, -0.1185, 0.2000, 0.0167],
device='cuda:0')),
('fc_layers.0.weight',
tensor([[-2.0662e-02, 3.8362e-03, -4.5101e-02, ..., 2.7986e-04,
-3.5301e-02, -2.4038e-02],
[-9.0473e-03, -3.7022e-02, 1.6137e-02, ..., -3.5271e-02,
-7.1256e-02, 1.8524e-02],
[ 1.2605e-02, -3.3033e-02, -6.0627e-02, ..., -4.9432e-02,
1.6915e-02, -9.7512e-03],
...,
[ 7.9433e-02, -2.8498e-02, -3.5008e-02, ..., -1.4529e-01,
-1.5253e-01, -8.8298e-02],
[ 1.8789e-02, -2.1407e-02, -3.5681e-02, ..., -2.8856e-02,
-2.1257e-02, -4.2476e-02],
[-2.3663e-02, -2.5838e-02, -9.8102e-05, ..., 1.6340e-03,
5.1269e-02, 3.1797e-02]], device='cuda:0')),
('fc_layers.0.bias',
tensor([-0.0088, -0.0181, -0.0271, 0.0128, 0.2847, -0.0445, -0.0591, -0.0594,
0.0037, 0.0306, -0.0599, 0.0700, 0.0030, -0.0306, -0.0486, 0.0990,
0.0946, 0.0965, -0.0168, -0.0482, 0.0536, -0.0078, -0.0416, 0.0768,
-0.0325, 0.0241, -0.0212, -0.0671, -0.0450, 0.1832, 0.0165, -0.0493,
-0.0025, -0.0206, 0.1692, -0.0636, -0.1279, -0.0343, -0.0138, -0.0801,
0.0180, -0.0078, 0.0309, -0.0174, -0.0314, 0.0294, -0.0137, -0.0113,
-0.0136, 0.0060, 0.0678, 0.0869, -0.0164, -0.0176, 0.0344, 0.0319,
-0.0268, 0.1070, 0.1190, -0.0287, -0.0096, -0.0599, 0.1197, -0.0541,
0.0115, 0.0262, -0.0070, -0.0707, 0.0290, 0.1073, 0.3405, 0.1163,
0.0607, -0.0231, -0.0374, -0.0235, -0.0126, -0.0211, 0.0382, 0.0782,
-0.0835, -0.0692, -0.0452, 0.1148, -0.0028, 0.1263, -0.0032, -0.0327,
0.0086, -0.0503, 0.0741, 0.0325, -0.0548, -0.0809, -0.0173, 0.0073,
0.0073, -0.0680, 0.0082, 0.0275, 0.2334, -0.0587, -0.0351, 0.1362,
0.0173, 0.0264, -0.0314, 0.0085, 0.0030, -0.0336, -0.0895, 0.0487,
-0.0594, -0.0777, 0.0820, 0.1542, -0.1026, -0.0567, -0.0190, -0.0209,
-0.0278, 0.1884, -0.0700, 0.0105, -0.0165, 0.0057, 0.0807, -0.0160,
0.0009, -0.0079, 0.0378, 0.0613, -0.0553, 0.0211, -0.0827, -0.0469,
-0.0228, 0.0384, 0.1175, 0.1245, -0.0331, -0.0694, 0.0047, 0.0198,
-0.0054, -0.0021, 0.1477, 0.2305, -0.0427, -0.0433, 0.0112, 0.1281,
-0.0325, -0.1045, -0.0166, -0.0422, 0.0077, -0.0494, 0.0034, -0.0067,
-0.0537, -0.0591, -0.0684, -0.0383, -0.0005, 0.0174, -0.0267, -0.0256,
0.0245, -0.0483, -0.0439, -0.0408, -0.0199, -0.0642, 0.0069, -0.0161,
-0.0081, -0.0546, -0.0506, -0.0274, -0.0278, 0.0325, 0.0753, -0.0078,
-0.0700, -0.0250, -0.0101, -0.0846, 0.0529, 0.1150, 0.0424, 0.0066,
-0.0641, -0.0809, -0.0313, -0.0366, 0.1939, -0.0283, 0.0170, -0.0817,
-0.0596, 0.1363, 0.0092, 0.1465, 0.0344, -0.0864, 0.0897, 0.0663,
0.0584, -0.0853, -0.0159, 0.0810, -0.0388, 0.0740, -0.0175, 0.1912,
0.1418, -0.0510, 0.0229, -0.0040, -0.0281, -0.0708, 0.0846, 0.0668,
0.0799, -0.0542, -0.0697, 0.1113, -0.0977, -0.0402, -0.0161, -0.0067,
0.0177, -0.0288, -0.0178, 0.0581, -0.0062, -0.0141, 0.0630, -0.0579,
0.0240, 0.1136, -0.0098, -0.0125, -0.0279, 0.0682, -0.0350, 0.0419,
0.1425, -0.0241, 0.0199, -0.0573, -0.0766, -0.0212, -0.0864, -0.0609],
device='cuda:0')),
('fc_layers.2.weight',
tensor([[ 0.0026, 0.0110, -0.0521, ..., -0.1642, -0.0007, -0.0282],
[-0.0451, -0.0146, 0.0252, ..., -0.0650, -0.0273, -0.0433],
[ 0.0504, 0.0300, 0.0182, ..., -0.0412, -0.1002, -0.0210],
...,
[ 0.0231, -0.0223, -0.0339, ..., -0.1482, -0.0403, -0.0223],
[-0.0278, 0.0147, 0.0046, ..., -0.2304, -0.0339, -0.0198],
[ 0.0282, -0.0041, 0.0379, ..., -0.1782, -0.0557, -0.0649]],
device='cuda:0')),
('fc_layers.2.bias',
tensor([-0.1179, 0.0409, 0.2139, 0.1003, 0.1385, -0.0747, -0.1249, -0.2279,
-0.1047, -0.1015], device='cuda:0'))])},
{'ratio': 0.75,
'bias': 128,
'train_losses': [286.005609935907,
258.19663520986916,
194.7077760026509,
167.70546944207962,
157.15172253770146,
149.3708754620835,
144.60329832539716,
140.91028001046305,
137.9823537288952,
136.40142538160555,
134.30298436062498,
132.81761256135988,
130.86215373847705,
129.00225028452866,
128.52474326841912],
'test_losses': [281.0017998639275,
224.79681773279228,
173.67945047453338,
157.9975126397376,
152.26528831556732,
143.77627449877122,
141.11272590300615,
136.2455851470723,
136.24535416621788,
129.32244628083473,
128.2226752720627,
125.96992638064366,
123.49493994899825,
122.30796426417781,
122.06648840156257],
'model_state_dict': OrderedDict([('conv_layers.0.weight',
tensor([[[[-0.0831, -0.2141, 0.0400],
[-0.0542, -0.2072, 0.1822],
[ 0.0537, 0.2338, 0.0641]],
[[-0.2387, -0.2827, 0.1217],
[-0.1396, -0.1512, 0.0389],
[ 0.1961, 0.2707, 0.1246]],
[[-0.0272, -0.1463, 0.1855],
[ 0.0209, -0.0630, -0.0607],
[ 0.0402, 0.0791, 0.0085]]],
[[[ 0.0435, 0.1656, 0.2380],
[-0.1597, -0.0408, -0.0186],
[ 0.0579, -0.0105, -0.2737]],
[[ 0.0315, 0.2935, 0.1733],
[-0.1115, 0.1251, -0.1215],
[-0.1704, -0.2001, -0.1135]],
[[ 0.1528, 0.1559, 0.1196],
[-0.1302, 0.0701, -0.0119],
[ 0.0225, -0.2228, -0.1209]]],
[[[-0.1503, 0.2420, 0.2737],
[-0.0642, 0.0822, 0.1435],
[-0.3232, -0.1542, -0.0462]],
[[ 0.0106, 0.0937, 0.2828],
[ 0.0094, -0.0830, 0.2029],
[-0.3509, -0.1290, -0.0018]],
[[ 0.0110, -0.1322, 0.0054],
[-0.0547, -0.0449, 0.0475],
[ 0.0069, 0.0787, 0.0787]]],
[[[-0.1502, -0.0556, 0.0629],
[-0.0984, -0.0153, 0.2264],
[ 0.1842, 0.0177, -0.0823]],
[[ 0.0350, 0.0814, 0.0353],
[-0.3077, -0.2117, 0.0508],
[-0.1376, -0.0806, -0.1188]],
[[-0.0522, 0.1729, 0.1871],
[ 0.0909, -0.0867, 0.2126],
[-0.0151, 0.1380, -0.0863]]],
[[[ 0.0907, 0.2840, -0.0892],
[ 0.2197, 0.1415, -0.1974],
[-0.1109, -0.1712, -0.1551]],
[[ 0.3287, 0.0799, -0.0413],
[ 0.2171, -0.1283, -0.1505],
[ 0.0865, -0.1350, -0.2776]],
[[-0.0310, 0.0773, -0.0817],
[ 0.2140, 0.0051, -0.0768],
[-0.0562, -0.1179, 0.1175]]],
[[[ 0.0967, -0.0049, 0.1137],
[ 0.0126, 0.0608, 0.0696],
[ 0.0037, -0.0522, 0.0505]],
[[-0.2263, -0.1507, 0.1427],
[-0.3847, 0.0124, 0.2121],
[-0.1428, -0.0581, 0.0577]],
[[-0.0134, -0.1823, 0.0015],
[-0.0401, 0.0338, 0.1030],
[ 0.1727, -0.1607, 0.0523]]],
[[[ 0.1837, 0.0386, 0.3022],
[ 0.1151, -0.0061, -0.1503],
[-0.1707, -0.1526, 0.0064]],
[[ 0.0895, 0.2295, 0.0940],
[ 0.1093, -0.2515, 0.1092],
[ 0.0910, -0.3702, -0.2709]],
[[ 0.1361, -0.0634, 0.0759],
[ 0.0928, -0.1206, -0.0404],
[-0.1218, -0.0596, 0.0441]]],
[[[-0.2781, 0.1333, 0.0908],
[-0.2386, -0.0713, 0.0007],
[-0.1826, -0.1042, 0.0907]],
[[ 0.1398, 0.2414, -0.1602],
[ 0.2122, 0.2541, 0.1315],
[ 0.1919, 0.0177, 0.0161]],
[[-0.2102, 0.0315, -0.0300],
[-0.3062, 0.1739, -0.0321],
[-0.1251, -0.1189, 0.1590]]]], device='cuda:0')),
('conv_layers.0.bias',
tensor([ 0.1808, 0.1651, 0.2167, -0.1767, 0.2043, -0.4574, -0.2930, -0.3687],
device='cuda:0')),
('conv_layers.2.weight',
tensor([[[[ 9.4108e-02, -1.5006e-01, 8.5462e-02],
[-1.6996e-01, -2.9267e-01, -3.8827e-02],
[-1.4888e-01, -1.9834e-01, 1.7224e-01]],
[[ 1.3884e-01, -1.3842e-02, -2.4756e-02],
[ 1.6096e-03, -8.1793e-02, -1.6721e-01],
[ 1.9808e-02, -7.6883e-02, 3.1160e-02]],
[[ 4.2470e-02, -2.0918e-01, -1.5910e-01],
[ 8.9149e-03, -2.8280e-01, -1.9359e-02],
[-9.6295e-02, -1.7552e-01, -9.3011e-03]],
...,
[[ 6.7544e-02, -1.4988e-01, -9.5181e-02],
[-1.8917e-01, -4.0013e-02, 4.3472e-02],
[-2.2085e-01, 7.0228e-02, 6.0833e-02]],
[[-1.2363e-01, -6.8920e-02, -1.1543e-01],
[-2.2108e-01, -9.8259e-02, -2.0146e-01],
[ 8.5233e-02, -1.0758e-01, -4.8054e-02]],
[[ 2.1597e-01, -1.3410e-01, -1.1836e-01],
[ 1.2793e-01, -3.6917e-01, -2.4651e-02],
[-4.6434e-03, -2.7203e-01, 7.0091e-02]]],
[[[-9.1157e-03, -2.2216e-01, -2.9308e-02],
[ 1.7763e-01, -5.9926e-02, -1.2846e-01],
[ 6.2724e-02, 2.5195e-02, 9.4612e-02]],
[[ 8.3565e-02, -1.5455e-03, -1.2226e-01],
[ 7.7612e-02, -6.9647e-02, -1.0298e-01],
[ 3.1552e-03, 5.8398e-02, 7.9884e-02]],
[[ 1.7219e-01, -1.8651e-01, -2.6849e-01],
[ 1.8359e-01, 2.8562e-02, -2.1843e-01],
[ 3.9873e-02, 1.1375e-01, -1.1773e-04]],
...,
[[ 1.1367e-01, -1.5311e-01, -1.3042e-01],
[ 2.0852e-02, 1.0136e-01, -1.2999e-01],
[-4.9628e-02, 1.8010e-01, 4.0218e-03]],
[[-2.0638e-01, -4.4143e-01, -4.6646e-02],
[-4.9793e-02, -4.9834e-02, 1.8489e-01],
[-1.0208e-01, -3.1374e-02, 3.6577e-03]],
[[-6.9678e-02, 4.9164e-02, 5.0145e-02],
[-1.9992e-01, 2.2351e-01, -1.3239e-01],
[-3.0525e-01, 9.1482e-02, -1.7506e-01]]],
[[[ 9.0982e-02, 3.9592e-02, 3.6133e-02],
[-2.7931e-01, -3.3952e-01, -1.9417e-01],
[-2.2403e-01, -1.6720e-01, 4.6033e-02]],
[[ 9.2806e-02, 9.0877e-02, -3.2645e-02],
[ 4.9930e-02, 2.3504e-01, -9.2325e-03],
[ 2.1241e-01, 1.8695e-01, 8.5698e-02]],
[[ 7.2341e-02, 1.2072e-01, 8.5758e-02],
[ 4.3947e-02, -3.2717e-02, 3.2977e-02],
[-6.7969e-02, -5.5295e-02, 2.7489e-02]],
...,
[[ 6.9644e-02, -2.5828e-01, -4.0288e-01],
[ 2.0657e-02, -1.4252e-01, -2.8118e-01],
[-1.2068e-01, -2.3084e-01, -1.8219e-01]],
[[-1.3720e-01, -1.1832e-01, -9.1167e-02],
[-2.3265e-01, -7.5985e-02, -1.9492e-01],
[ 4.3309e-02, 3.7731e-02, -5.0002e-02]],
[[ 1.4815e-01, 4.3231e-02, -3.6151e-02],
[-6.9831e-02, -6.1242e-02, -4.1159e-01],
[-8.6480e-02, -8.4924e-02, -2.4696e-01]]],
...,
[[[ 1.4193e-01, -3.9011e-02, -3.3058e-02],
[ 1.0378e-01, 1.0241e-01, 9.5738e-02],
[-5.0973e-03, 1.8293e-01, 1.8582e-01]],
[[ 9.9863e-02, -5.2715e-02, 3.9267e-02],
[ 2.2932e-02, -9.2719e-02, 3.8617e-02],
[-3.4902e-02, 1.0979e-01, 9.9887e-02]],
[[-3.0655e-03, -5.6668e-02, -3.8597e-02],
[ 9.6193e-02, -1.4407e-02, 1.3069e-01],
[ 3.4608e-02, 1.5643e-01, 1.8926e-01]],
...,
[[-1.1253e-01, 1.0379e-01, 1.0192e-01],
[-9.5464e-03, 7.0469e-03, 1.6348e-01],
[-4.6171e-02, 1.9678e-01, 1.8280e-01]],
[[-3.2484e-02, 2.0013e-01, 2.2662e-01],
[ 2.7115e-02, 1.2748e-01, 1.3620e-01],
[-1.0604e-01, 1.4935e-01, 3.2183e-02]],
[[-4.2123e-02, -6.8782e-03, 1.7387e-01],
[-5.7196e-02, -3.4011e-02, 1.6140e-01],
[-1.6534e-01, 8.7170e-02, 2.4743e-01]]],
[[[-2.2087e-02, 4.5554e-02, 7.1250e-02],
[ 9.9589e-02, -3.2848e-02, -8.5601e-02],
[ 5.3439e-02, -2.5162e-02, -6.0591e-02]],
[[ 7.9695e-02, 6.8020e-02, 8.0197e-02],
[-1.3489e-01, -5.9682e-03, 7.8435e-02],
[-3.8436e-01, -3.4369e-01, -2.5978e-01]],
[[ 1.9235e-01, 1.4427e-01, 1.1367e-02],
[-7.4350e-02, 1.1777e-01, 3.9365e-02],
[-1.0908e-01, -4.0703e-02, 5.5215e-03]],
...,
[[-2.0875e-01, -2.0989e-01, -4.7908e-02],
[-2.6748e-01, -2.5321e-01, -1.0266e-01],
[-1.8868e-01, -3.7929e-02, -6.2021e-02]],
[[-7.4465e-02, -1.5916e-01, 2.3685e-02],
[-1.8667e-01, -4.8340e-02, 1.2181e-01],
[-3.2755e-02, -1.5889e-01, -3.2824e-02]],
[[-2.6958e-01, -6.2016e-02, -6.7001e-02],
[-2.6637e-01, -2.2584e-01, -1.4587e-01],
[-1.2810e-01, -9.1953e-03, -1.6169e-01]]],
[[[ 1.7101e-02, 7.2749e-02, 3.8311e-02],
[ 7.2533e-02, -9.4855e-02, -1.6668e-01],
[ 4.7500e-02, 7.3465e-02, -1.1810e-01]],
[[ 7.0303e-02, -9.2167e-02, 1.8976e-02],
[-1.2325e-01, -1.3699e-02, -3.3071e-02],
[ 2.1204e-02, -1.7466e-02, -1.9458e-02]],
[[ 6.8918e-02, -1.9765e-02, -7.2854e-02],
[ 1.3759e-01, 3.2679e-02, -4.5057e-02],
[ 1.5024e-02, -2.8308e-02, 1.0178e-01]],
...,
[[-2.1905e-01, -6.5503e-02, -5.8232e-02],
[-1.8334e-01, 9.9053e-02, -1.1719e-01],
[-5.7166e-02, -7.2476e-02, -1.3387e-01]],
[[-5.3654e-02, -4.2043e-02, 1.2924e-01],
[-6.7154e-02, -1.7406e-01, -5.3667e-02],
[-1.4863e-02, -1.7594e-01, -4.3553e-02]],
[[-1.2465e-01, -1.5941e-01, 4.4127e-03],
[-2.1240e-02, -2.6975e-01, 1.7211e-01],
[-2.7937e-02, -1.4956e-02, 1.7181e-01]]]], device='cuda:0')),
('conv_layers.2.bias',
tensor([ 0.2199, 0.0338, -0.0795, 0.2029, -0.3677, -0.0755, 0.1590, 0.1050,
-0.0198, 0.2132, 0.0646, 0.1246, 0.1473, -0.2739, 0.2122, 0.2458],
device='cuda:0')),
('conv_layers.5.weight',
tensor([[[[ 1.1039e-02, 9.1640e-02, 1.6262e-01],
[ 6.7473e-03, -4.6301e-02, 9.2986e-02],
[ 1.6530e-02, -1.0910e-01, 5.6410e-02]],
[[-1.7051e-02, 1.4762e-02, -1.4670e-02],
[-1.2058e-02, -4.6900e-03, 3.2825e-02],
[-1.2109e-01, -1.9630e-01, -8.2420e-02]],
[[-3.7781e-02, 9.1817e-02, 2.2070e-01],
[ 3.3198e-03, 6.8911e-02, 9.7123e-02],
[-2.1411e-01, -9.3888e-02, 2.3877e-02]],
...,
[[-2.2922e-01, -2.5889e-01, -2.7922e-01],
[-3.1874e-01, -4.3836e-02, -2.0860e-01],
[-1.3016e-01, -2.0649e-01, -9.5828e-02]],
[[ 6.5089e-02, 3.5151e-03, 5.7095e-03],
[ 2.1636e-01, -5.1441e-02, -5.5110e-02],
[ 5.9357e-02, 5.1390e-02, 1.4364e-01]],
[[ 1.3676e-01, 1.4991e-01, -8.1794e-02],
[ 1.0943e-01, -5.1633e-02, -2.1114e-01],
[ 1.9679e-01, 1.5449e-01, 2.2826e-02]]],
[[[-1.1018e-02, 7.9155e-02, -3.3245e-02],
[ 8.1432e-03, 9.7636e-02, -1.9468e-02],
[-2.8864e-02, 1.1814e-01, 1.2624e-01]],
[[-5.7976e-02, -1.9123e-01, 3.0838e-02],
[ 7.7583e-05, 1.0457e-01, 4.2561e-02],
[ 3.5216e-02, 1.8238e-01, 3.5289e-02]],
[[-1.4071e-01, -1.0913e-01, -1.4648e-01],
[-7.2477e-02, -1.6835e-01, -2.6216e-01],
[ 1.3575e-01, -1.6446e-01, -3.9981e-01]],
...,
[[-2.0343e-01, 9.2555e-02, 2.0763e-01],
[-2.4772e-01, -3.7165e-02, 1.2338e-01],
[-1.3561e-01, -5.2331e-02, 8.5271e-03]],
[[ 1.3605e-01, 4.7768e-02, -1.8012e-01],
[ 1.0163e-01, -3.2995e-02, -1.4611e-01],
[ 1.5540e-02, -1.4736e-01, -3.2689e-02]],
[[ 9.4734e-02, 3.3830e-02, 1.7955e-02],
[-2.9635e-02, 8.6092e-03, -1.2950e-02],
[ 1.5669e-01, 1.3570e-01, -1.1956e-02]]],
[[[ 6.0219e-02, -4.6429e-02, -1.1373e-01],
[-1.1238e-02, -1.0784e-01, -1.0909e-01],
[-1.2033e-01, -2.1775e-01, 3.2515e-02]],
[[-7.4088e-02, -8.1893e-02, -1.6936e-01],
[-1.3867e-01, -4.3588e-02, -2.4186e-01],
[-7.7440e-02, -9.7821e-02, -2.1176e-01]],
[[ 9.2608e-02, 8.9836e-02, -1.2845e-01],
[ 6.9071e-02, -1.3772e-01, -3.5494e-01],
[-1.2960e-01, -2.2956e-01, -2.1434e-01]],
...,
[[-3.3737e-02, -1.1088e-01, -5.5615e-02],
[-1.7087e-02, 7.2982e-02, -4.4863e-02],
[-2.2570e-02, 5.2512e-03, 3.2976e-02]],
[[-1.2971e-01, 5.5150e-02, -1.1389e-01],
[-1.8630e-01, -3.4319e-02, -1.4308e-02],
[-8.8514e-02, 6.4388e-02, -6.4644e-02]],
[[ 2.1002e-02, -1.8487e-01, -2.9520e-01],
[-2.0969e-01, -1.2879e-01, -2.3740e-01],
[-1.2539e-01, -6.4267e-02, -9.3023e-02]]],
...,
[[[ 4.4922e-02, 2.1038e-02, -4.4872e-02],
[-4.8324e-02, -3.7595e-01, -2.3608e-01],
[-3.3366e-01, -4.0845e-01, -2.9204e-02]],
[[ 1.8922e-01, 1.9033e-01, 3.3777e-02],
[ 5.7986e-02, -1.6977e-01, -6.8078e-02],
[-7.0521e-02, -3.3899e-01, -2.3080e-01]],
[[ 1.0558e-01, 1.4889e-01, 1.2726e-01],
[ 4.9828e-02, 5.6143e-02, -1.9428e-02],
[ 8.6621e-03, 3.0633e-02, -4.7209e-02]],
...,
[[-9.7813e-02, -1.0676e-01, -4.1804e-02],
[-3.5068e-01, -1.5651e-01, -1.1734e-01],
[-1.5441e-01, -2.2737e-02, 9.1964e-03]],
[[ 1.1914e-01, 8.2845e-02, 1.0246e-01],
[ 1.5808e-02, 2.5613e-01, 1.6194e-01],
[-2.3360e-02, -1.2333e-01, -7.8280e-02]],
[[ 1.3227e-03, 8.1771e-02, 2.8258e-02],
[ 2.8249e-02, 8.4966e-02, 1.0172e-01],
[ 1.0550e-01, 1.2966e-01, 1.7137e-01]]],
[[[-5.4378e-02, -2.2019e-01, -2.4096e-01],
[ 1.0083e-01, -4.4852e-01, -1.6624e-01],
[ 3.5059e-02, -3.1115e-01, -1.2909e-01]],
[[-2.7712e-01, -5.2113e-02, -3.9840e-02],
[-1.3807e-01, -9.7716e-02, -4.2133e-03],
[-9.8489e-02, -1.8480e-01, -1.0041e-01]],
[[ 2.0767e-01, 8.8064e-02, 6.1950e-02],
[ 6.5590e-02, -1.1360e-01, -3.4348e-02],
[ 9.4918e-02, -6.0707e-02, -1.2431e-02]],
...,
[[-2.1503e-01, -1.9300e-01, -2.8395e-01],
[ 4.4094e-02, 3.1237e-02, -3.8466e-02],
[-3.6173e-02, -6.5892e-03, -3.9225e-02]],
[[-2.0338e-02, -1.1391e-01, -3.1916e-01],
[ 6.4796e-03, 2.8031e-02, -7.1150e-02],
[ 9.9058e-02, 1.3622e-01, 1.2612e-01]],
[[ 5.1304e-02, 7.5890e-02, -1.9360e-01],
[ 1.7055e-01, 8.9682e-02, 3.0448e-02],
[ 1.5374e-01, 1.8262e-01, 1.0027e-01]]],
[[[-3.2873e-02, -5.9079e-02, -4.0015e-02],
[-1.6628e-02, 5.7841e-02, -5.3598e-02],
[-7.1840e-02, 4.5817e-03, 6.3976e-02]],
[[-4.9596e-03, 4.1507e-02, -9.0960e-02],
[-1.6971e-02, -1.4841e-02, -1.1176e-01],
[-1.1740e-01, -1.1283e-01, -4.6648e-02]],
[[-8.0292e-03, -4.3521e-03, -1.4551e-02],
[-3.0768e-02, -4.1859e-02, -8.5865e-03],
[-9.2757e-02, -1.8328e-02, -2.0913e-02]],
...,
[[-9.1072e-03, 3.6616e-02, -8.4028e-02],
[-5.8986e-02, -9.1237e-02, -3.6161e-02],
[ 2.2685e-02, -6.7287e-02, -4.2446e-02]],
[[-7.2397e-02, -1.6121e-02, -2.5111e-02],
[ 2.2499e-02, -2.0515e-02, -7.9291e-02],
[ 4.5610e-02, 1.5722e-02, 1.6985e-02]],
[[-5.1397e-02, 1.0615e-02, -8.7217e-02],
[-4.8947e-03, 1.2023e-02, -8.6109e-02],
[-1.8900e-02, 4.3436e-03, 6.9636e-03]]]], device='cuda:0')),
('conv_layers.5.bias',
tensor([ 0.1365, -0.0236, -0.0886, 0.0633, -0.1117, 0.1074, -0.0509, -0.0088,
0.1975, 0.0688, 0.0501, 0.2613, -0.0961, -0.0552, -0.0831, 0.3126,
0.1999, -0.1161, -0.0739, 0.0293, 0.2419, -0.0140, -0.0484, 0.1105,
-0.1061, 0.2062, -0.1501, -0.1561, -0.1216, 0.0810, 0.0415, -0.0931],
device='cuda:0')),
('conv_layers.7.weight',
tensor([[[[-3.3149e-01, -8.4730e-02, 1.9093e-01],
[-2.7340e-01, -6.1013e-02, 8.2410e-02],
[-4.5423e-02, 9.4536e-02, 2.3440e-01]],
[[ 6.6776e-02, 2.0110e-01, 1.5625e-01],
[-2.0989e-03, -2.4528e-03, -3.4167e-02],
[ 7.7898e-02, -3.8268e-02, -5.7803e-02]],
[[ 1.0163e-01, 6.4765e-02, -2.4113e-01],
[-6.3418e-03, -1.3067e-01, -6.5902e-02],
[-7.5090e-02, -5.2109e-02, -5.9170e-03]],
...,
[[ 9.7753e-02, -4.0372e-02, -2.0571e-01],
[-1.0236e-05, -5.8864e-02, -1.5021e-01],
[ 7.5745e-02, -2.1302e-02, -2.5066e-02]],
[[-1.5329e-01, -1.4194e-01, 9.3008e-02],
[ 9.8032e-02, 6.7513e-02, 1.8391e-01],
[-9.2357e-04, 1.1598e-01, 9.5667e-02]],
[[-5.3329e-02, 2.5837e-02, 1.0627e-02],
[-2.1969e-02, -5.9045e-02, 2.1969e-02],
[-4.8398e-02, -1.7122e-02, -3.8750e-03]]],
[[[-1.0860e-01, 6.8980e-02, 5.6014e-02],
[ 7.7401e-02, 1.9012e-01, 2.4057e-01],
[ 1.8747e-01, 8.7938e-02, -5.5471e-03]],
[[-4.7152e-02, -1.1408e-01, -6.1996e-02],
[-3.8547e-02, -1.3943e-01, -1.5452e-01],
[-4.4624e-02, -1.4696e-01, -1.1746e-01]],
[[-4.0347e-02, -2.9609e-02, -4.9954e-02],
[ 1.0604e-01, 2.5689e-02, -1.0712e-01],
[ 5.2598e-02, -8.1865e-02, 1.2046e-02]],
...,
[[ 1.2879e-01, -1.4183e-01, -2.0892e-01],
[-9.8640e-02, 4.1414e-02, 4.7106e-02],
[-2.5543e-02, 7.8436e-02, 1.1188e-01]],
[[ 1.5647e-01, 2.3803e-02, -9.7645e-02],
[-3.3421e-01, -2.7822e-01, -2.5992e-01],
[-2.3451e-01, -1.2791e-01, -1.6102e-01]],
[[ 1.1209e-02, -1.4151e-03, -1.7652e-03],
[ 2.7476e-02, -3.3040e-02, -1.8847e-02],
[ 2.9657e-02, -6.1563e-02, -1.8032e-02]]],
[[[-5.5321e-02, -3.7054e-02, -8.8110e-02],
[-2.6526e-01, -2.0278e-01, -3.8100e-02],
[ 1.2685e-02, 2.0359e-02, 9.2282e-02]],
[[ 1.9107e-01, -1.1723e-01, -8.9571e-02],
[ 1.2154e-01, 4.6371e-02, -5.1076e-02],
[ 3.9062e-02, -2.0028e-02, -8.7462e-02]],
[[ 2.5222e-01, 1.6088e-01, 1.3917e-01],
[ 8.4246e-02, -1.5591e-02, 1.0150e-01],
[-3.2169e-02, -4.5843e-02, 1.9801e-02]],
...,
[[ 7.9666e-02, -3.7987e-02, 3.6573e-02],
[-9.8065e-02, 8.1258e-02, 9.6724e-02],
[ 2.9344e-02, 1.0717e-01, 1.1665e-02]],
[[-5.6375e-02, -1.4094e-01, 2.1579e-01],
[-1.9625e-01, 2.7479e-02, -2.2517e-02],
[-9.3512e-02, -1.8484e-01, -5.7323e-02]],
[[ 3.1416e-02, -1.4916e-03, -7.8456e-03],
[ 4.4631e-02, 2.9087e-02, 6.0536e-04],
[-3.0877e-02, 3.4887e-02, -5.5369e-02]]],
...,
[[[-3.2555e-02, 1.6457e-01, -2.0311e-02],
[ 4.6692e-02, 7.8278e-02, 1.0093e-02],
[ 1.5846e-01, 3.8821e-02, -1.6114e-01]],
[[ 8.6054e-03, -2.7654e-02, -2.3354e-02],
[-1.3202e-01, -1.4520e-01, 5.9284e-02],
[-6.5489e-02, -5.5461e-02, 1.4732e-01]],
[[-1.4110e-01, 5.8553e-03, 4.8074e-02],
[-2.1911e-02, -1.2263e-03, 1.3347e-01],
[-4.0113e-02, 1.3600e-01, 6.2419e-02]],
...,
[[ 2.7116e-01, -1.1151e-01, -6.0417e-02],
[-1.0661e-01, -1.0530e-01, 8.6462e-03],
[-4.9761e-02, 4.2781e-02, 4.8146e-03]],
[[ 6.3303e-02, 1.0233e-01, -7.0044e-02],
[-2.0853e-01, -6.1496e-02, -1.0870e-02],
[-2.2970e-01, 3.1365e-02, 4.0480e-02]],
[[ 3.0829e-02, 5.1681e-02, -4.0110e-02],
[ 5.0925e-02, -9.0786e-02, -7.3619e-02],
[-6.9064e-03, -5.3913e-02, 3.5250e-04]]],
[[[-1.1825e-01, -1.7252e-01, 1.7576e-01],
[-1.0786e-01, -2.2788e-01, 3.2866e-02],
[-2.8690e-02, -1.0585e-02, 2.1509e-01]],
[[ 1.0316e-01, 1.4765e-01, -1.3647e-01],
[-9.3758e-02, 8.1375e-02, -1.6648e-01],
[-1.0524e-01, -9.3622e-02, -1.2162e-01]],
[[-2.0026e-01, 6.9327e-02, 3.2381e-02],
[-2.9988e-01, -1.2609e-01, -1.0644e-01],
[-2.9709e-01, -4.7330e-02, 3.6614e-02]],
...,
[[ 1.6899e-01, 1.8257e-02, -6.3698e-04],
[-3.7275e-02, -8.1316e-03, 2.1445e-02],
[ 5.2483e-02, 2.6663e-02, 1.8647e-01]],
[[-2.7249e-02, 3.9788e-03, 1.2530e-01],
[ 4.7858e-02, 2.7167e-02, 1.4168e-01],
[-1.3209e-01, -1.0509e-01, 2.5165e-02]],
[[-2.5053e-02, 2.8034e-02, -9.1838e-03],
[-1.6059e-03, -1.5603e-02, 2.6337e-02],
[-1.1146e-02, -4.2634e-02, -4.8287e-02]]],
[[[ 2.3874e-02, 5.7808e-02, -1.0718e-01],
[ 1.3280e-01, 1.2467e-01, 1.5654e-01],
[ 1.0264e-01, 6.8027e-03, -1.2034e-01]],
[[ 9.2593e-02, -5.0421e-02, -4.4791e-02],
[-4.3504e-02, 3.8000e-02, -9.9438e-03],
[-1.2769e-01, -4.4479e-02, 6.0356e-02]],
[[ 7.2140e-02, -5.5984e-03, -7.1412e-02],
[ 7.7147e-02, 1.2446e-01, -1.6244e-02],
[-6.3259e-02, -1.5509e-01, -1.8094e-01]],
...,
[[ 1.8368e-01, 1.9931e-02, -1.3193e-01],
[ 1.8692e-01, 1.5538e-01, 1.0340e-03],
[ 5.5660e-02, -1.0439e-01, 1.3669e-02]],
[[-2.6299e-03, -1.1866e-01, -1.3741e-01],
[ 1.2336e-01, 9.6261e-03, -4.5446e-02],
[-6.9179e-02, -1.0918e-01, -5.3425e-02]],
[[ 2.5545e-02, -1.5920e-02, -6.0237e-05],
[ 5.1768e-02, 1.0586e-02, -5.0890e-03],
[ 1.2735e-02, -5.4902e-03, 3.4854e-02]]]], device='cuda:0')),
('conv_layers.7.bias',
tensor([ 0.0101, 0.0147, 0.1448, -0.1324, 0.1121, -0.1127, 0.1744, -0.1155,
0.0957, -0.1741, 0.0908, 0.2373, 0.0082, 0.1008, 0.0247, -0.0082,
0.1175, -0.0843, 0.1162, -0.0122, -0.0676, -0.0783, 0.0951, 0.0286,
0.1079, 0.1652, 0.0518, 0.0075, -0.1283, -0.0090, 0.0794, 0.1314],
device='cuda:0')),
('conv_layers.10.weight',
tensor([[[[-2.6756e-01, -1.1515e-01, 4.6295e-02],
[-1.0395e-01, 1.0583e-01, 8.5825e-02],
[ 1.3447e-01, 1.8920e-01, 1.2478e-01]],
[[ 1.5387e-02, -6.2432e-02, 1.3551e-01],
[ 1.4412e-01, 2.2217e-01, 7.8488e-02],
[ 2.0742e-01, 2.5604e-01, 1.0034e-01]],
[[-6.8321e-02, 2.7352e-02, 1.5208e-03],
[-8.6727e-02, -4.7562e-02, 3.5178e-02],
[ 2.9271e-02, -7.8074e-02, -2.3867e-01]],
...,
[[-2.2495e-01, -4.0058e-03, 5.7336e-02],
[-1.3521e-02, 5.2172e-02, 1.4294e-01],
[ 1.7605e-02, 7.2122e-02, -5.7146e-02]],
[[ 9.4208e-02, -8.6914e-02, 1.3151e-01],
[ 2.5519e-02, 2.3351e-02, 7.2108e-02],
[ 1.7897e-01, 7.9548e-02, 7.7353e-02]],
[[ 1.3626e-01, -5.8556e-02, -9.7922e-06],
[ 5.5581e-02, -1.8054e-01, -1.2540e-01],
[ 2.5047e-02, 1.1747e-04, -9.5544e-02]]],
[[[-4.4142e-02, -4.3976e-02, -1.4250e-01],
[ 1.8365e-01, 1.0683e-01, -4.9002e-02],
[ 1.7554e-01, 1.4487e-01, -6.8629e-02]],
[[-1.2344e-01, -1.0758e-01, -1.2662e-01],
[-8.9425e-02, -1.4316e-02, 5.2939e-02],
[ 1.4609e-01, -6.9190e-02, -6.2497e-02]],
[[-6.9243e-02, 9.0199e-02, 1.4777e-01],
[-3.1090e-02, 6.6316e-02, 7.4347e-02],
[ 7.7104e-02, -5.6184e-02, -7.5728e-03]],
...,
[[ 9.5130e-02, -7.4277e-02, -1.5651e-01],
[-1.2076e-02, -1.0633e-01, -1.1661e-01],
[-4.7053e-02, -1.2463e-01, -9.1151e-02]],
[[ 7.8706e-02, 1.1987e-02, -5.7872e-03],
[ 1.1598e-01, 9.1181e-05, -2.6795e-02],
[ 4.4838e-02, -3.2948e-02, -2.4684e-02]],
[[-3.3267e-01, -2.6648e-01, -1.4624e-01],
[-2.9867e-01, -2.4553e-01, -1.3886e-02],
[-2.4734e-01, -2.0789e-01, -6.9652e-02]]],
[[[ 4.9484e-02, 1.8182e-01, 2.9865e-01],
[-4.9055e-02, -1.4792e-02, 2.5469e-02],
[-2.0631e-01, -1.0193e-01, -1.5822e-01]],
[[ 6.4126e-02, -1.0088e-01, -1.3646e-01],
[-1.4322e-01, -1.3973e-01, -4.9852e-02],
[ 2.4551e-02, -1.5711e-02, 2.9570e-02]],
[[ 2.9600e-02, 1.3374e-01, -5.5436e-02],
[-1.0051e-01, -3.8248e-02, 5.1709e-03],
[ 9.9179e-02, -9.1256e-03, 2.5807e-02]],
...,
[[ 8.1981e-02, 1.7447e-01, 1.3017e-01],
[ 5.1020e-02, 1.1954e-01, 1.1059e-01],
[-1.0157e-01, -4.1885e-02, 8.8788e-03]],
[[-6.2215e-02, -1.1443e-01, 8.7998e-02],
[-1.0609e-01, -2.8068e-01, -1.6784e-02],
[-9.0756e-02, -2.8897e-01, -1.5623e-01]],
[[ 1.6200e-01, -5.3078e-03, -3.1400e-01],
[ 1.2060e-01, -9.7127e-03, -8.6134e-02],
[ 9.8960e-02, 1.6281e-02, -1.0220e-03]]],
...,
[[[ 2.3137e-02, -7.6884e-02, -1.5399e-01],
[ 1.0386e-01, 2.7003e-02, -1.2547e-01],
[ 2.5899e-02, 7.5766e-02, -4.6824e-02]],
[[-9.6444e-02, 1.7808e-02, 1.0507e-01],
[ 8.1927e-03, 5.4973e-02, 1.3570e-01],
[ 1.4762e-02, -9.4179e-02, -5.3205e-02]],
[[-5.5703e-02, -8.2347e-02, 4.7630e-02],
[-4.4490e-02, -5.2999e-02, -3.7649e-02],
[-2.5952e-02, -8.6468e-02, -7.5314e-02]],
...,
[[-3.9258e-02, -1.6766e-02, 7.0580e-03],
[ 4.3758e-02, 3.4391e-02, 2.8933e-02],
[ 7.3723e-02, -6.3211e-03, 3.9388e-02]],
[[-1.0829e-01, -1.3310e-01, 7.0357e-02],
[-9.9231e-02, 9.5057e-03, 4.9887e-02],
[-1.1682e-01, 8.9637e-02, 9.7632e-02]],
[[-8.8089e-02, -2.1878e-01, -1.2250e-02],
[-3.0961e-01, -1.1608e-01, 1.3306e-01],
[-1.2951e-01, -1.0972e-02, 2.9117e-02]]],
[[[ 1.2140e-01, -8.2725e-03, -2.3228e-01],
[ 1.6858e-02, -1.8973e-01, -1.0441e-01],
[-9.1029e-02, -7.0532e-03, 8.3450e-02]],
[[ 7.9676e-02, 1.7277e-01, -1.7647e-02],
[-9.5268e-02, -2.0802e-01, -1.1304e-01],
[-3.6671e-02, 6.8706e-02, -3.4182e-02]],
[[-4.3892e-02, 2.7231e-01, -2.8302e-03],
[ 1.6058e-01, 6.2527e-02, 7.7259e-02],
[-6.6144e-02, 1.6624e-01, 9.4013e-02]],
...,
[[ 2.8344e-02, -7.4201e-03, -3.7854e-01],
[ 3.4454e-03, 4.4284e-02, -1.2270e-01],
[ 6.5777e-02, 1.5417e-01, -9.1811e-02]],
[[ 1.1903e-01, -1.9724e-01, 4.7265e-02],
[ 3.7997e-03, -1.4461e-01, 2.6700e-01],
[-1.1480e-01, 1.6379e-01, 1.9187e-01]],
[[-2.8284e-02, 9.0316e-02, 2.1835e-01],
[-5.5535e-02, 1.9190e-01, 5.5383e-02],
[-1.7336e-01, -1.7918e-01, 1.4625e-01]]],
[[[ 1.3790e-01, 2.3509e-02, -2.0530e-01],
[ 1.3205e-01, 1.0041e-01, -1.1712e-01],
[ 1.7803e-01, 1.3925e-01, -2.5892e-02]],
[[-2.1795e-01, -7.1205e-02, -8.9675e-02],
[-7.6727e-02, 1.2213e-02, 3.5341e-02],
[ 2.5739e-02, 7.8149e-02, 4.2404e-02]],
[[-6.2839e-02, 1.8668e-02, 1.4342e-01],
[-1.5145e-01, -6.5443e-02, 1.0189e-02],
[-1.9114e-01, -1.6323e-01, -1.9963e-01]],
...,
[[ 4.1194e-02, 9.5024e-02, -4.8267e-02],
[ 1.3227e-01, 1.9242e-01, 3.2417e-02],
[-4.3900e-02, 1.6599e-02, 1.2314e-03]],
[[-1.9880e-01, -8.5040e-02, -2.0125e-01],
[ 3.1393e-02, -4.2809e-03, -5.7906e-02],
[ 1.4049e-01, 1.5305e-01, 1.3699e-01]],
[[ 4.3130e-03, 4.7225e-02, 2.1809e-01],
[-1.3564e-01, -1.2983e-01, 1.8201e-01],
[-5.3178e-02, -1.0824e-01, 2.1534e-02]]]], device='cuda:0')),
('conv_layers.10.bias',
tensor([ 0.0796, 0.2337, 0.1853, -0.0872, -0.1046, 0.0261, 0.1333, -0.0818,
0.2830, 0.0477, -0.0965, -0.0459, 0.1634, -0.1095, 0.0724, 0.1129,
-0.0493, -0.2030, -0.0663, 0.2753, 0.1779, 0.0779, -0.0385, -0.1213,
0.1388, 0.2156, 0.1215, -0.0712, 0.0629, 0.1202, 0.0281, 0.1625],
device='cuda:0')),
('conv_layers.12.weight',
tensor([[[[ 5.7963e-02, 4.9288e-02, -2.6573e-02],
[ 4.2155e-02, 2.1429e-03, -4.7267e-02],
[ 1.0902e-01, -6.7857e-02, -1.5693e-01]],
[[ 5.3216e-03, 9.9100e-03, 7.1405e-02],
[ 8.8353e-02, 1.2021e-03, 5.4991e-02],
[-2.7818e-02, 1.3317e-02, -9.3589e-02]],
[[-3.6688e-02, 4.7998e-02, 2.0891e-01],
[-1.6815e-01, -1.7059e-02, 3.2192e-01],
[-2.2306e-01, 9.9943e-02, 2.8860e-02]],
...,
[[-7.5253e-03, -3.9826e-02, -2.4202e-02],
[-9.7245e-02, 8.7474e-03, 7.4297e-02],
[ 1.2447e-01, -3.5142e-02, 5.9843e-02]],
[[-2.0445e-02, -2.1755e-01, -2.9727e-01],
[-5.0216e-02, -2.0144e-01, -1.7170e-01],
[-9.0996e-02, -4.0734e-02, -3.1549e-01]],
[[-1.5631e-01, 6.7860e-02, 6.8251e-02],
[ 8.5538e-02, 1.2965e-01, 2.6637e-01],
[ 1.4629e-02, -2.5086e-03, 1.1597e-01]]],
[[[-3.5816e-02, 1.4991e-01, 2.2300e-01],
[ 6.6729e-02, -1.9047e-02, 1.1413e-01],
[-6.7205e-02, -2.5791e-01, -7.1892e-02]],
[[ 3.9898e-02, -3.1397e-02, 1.4480e-01],
[-8.9428e-02, -1.6355e-01, -1.7864e-01],
[-3.3243e-02, -1.0898e-01, 1.7343e-02]],
[[ 3.5057e-02, -3.9270e-02, -6.0403e-02],
[-1.2760e-01, -1.4016e-01, -1.2363e-01],
[-6.5282e-02, -6.2425e-03, -1.3548e-01]],
...,
[[-8.3898e-02, -5.0962e-03, 1.1037e-01],
[-5.8690e-03, 5.1803e-02, -2.8563e-02],
[-4.6389e-02, 7.0045e-03, -9.5826e-02]],
[[ 1.4876e-01, -7.1979e-02, -8.1750e-02],
[-4.2775e-01, -1.5964e-01, -9.8797e-02],
[ 1.1424e-01, 1.2483e-01, -9.4772e-02]],
[[-2.7195e-02, -2.0158e-02, 1.1411e-01],
[-7.8373e-02, -1.5098e-01, -5.8511e-02],
[-6.9604e-02, -2.1330e-02, -2.5470e-02]]],
[[[-3.0715e-03, -3.7657e-02, -9.8320e-02],
[-7.8402e-02, -2.6468e-02, -9.2191e-03],
[-6.9625e-02, -5.7747e-02, -7.5805e-02]],
[[-2.2274e-02, 8.7470e-03, -7.6252e-02],
[-8.6493e-02, 2.6160e-02, 1.9582e-02],
[-5.8257e-02, -5.9879e-02, -1.3937e-02]],
[[ 2.8675e-02, -3.8858e-02, 1.1301e-02],
[-4.9586e-02, 3.4557e-02, 6.4728e-03],
[ 1.0786e-02, 2.2407e-02, 5.3677e-02]],
...,
[[ 1.3728e-02, -5.2479e-02, -1.1010e-01],
[ 2.4051e-03, -3.3614e-02, -6.2793e-02],
[ 2.0625e-02, 1.1473e-03, -3.3369e-03]],
[[-4.0663e-03, -2.8863e-02, -4.0014e-02],
[-2.8524e-02, 1.4733e-02, -2.2825e-02],
[-1.1408e-02, -2.8542e-02, -8.2511e-02]],
[[-4.8357e-03, -6.0432e-02, 1.7796e-02],
[-6.9727e-04, 1.0095e-02, -1.9711e-02],
[ 1.2001e-02, -4.0031e-02, -1.5545e-02]]],
...,
[[[-2.0078e-01, 2.1117e-02, 1.5347e-01],
[-1.5146e-01, 6.5237e-02, 1.8788e-01],
[ 2.3454e-02, 1.2866e-01, 2.9413e-01]],
[[-3.7031e-02, -1.1452e-01, -6.2603e-02],
[ 2.4561e-02, 4.0511e-03, -9.2671e-03],
[-7.9670e-03, 1.1119e-01, 1.6635e-01]],
[[ 1.4160e-01, 1.6737e-01, 1.3968e-01],
[ 2.1730e-01, 4.9700e-02, -8.7731e-02],
[-4.7046e-02, -9.5150e-02, -4.5136e-02]],
...,
[[ 5.5272e-02, 4.6913e-02, 1.9110e-01],
[-1.3490e-02, 7.1670e-02, 9.7697e-02],
[ 9.4593e-02, 4.4740e-02, 9.3439e-02]],
[[ 1.0773e-01, 2.9414e-02, 1.3448e-01],
[-6.8102e-02, -1.6841e-01, 1.4036e-01],
[-6.8541e-02, -9.0082e-02, 1.3721e-01]],
[[ 7.9165e-03, 9.7715e-02, 1.9288e-01],
[-3.3795e-02, 6.0217e-02, 1.2659e-01],
[ 4.9798e-02, 2.0553e-02, 3.4552e-02]]],
[[[ 6.6777e-02, 1.6501e-01, -7.9637e-02],
[ 7.0596e-02, 3.7708e-02, -1.1672e-01],
[ 1.0682e-01, 8.5524e-03, -1.4350e-01]],
[[ 8.3364e-02, 6.5216e-02, -1.9231e-01],
[ 1.0413e-01, -5.5511e-02, -3.3764e-01],
[ 5.0924e-02, -1.0456e-01, -4.1149e-01]],
[[-3.6877e-01, -1.9218e-01, -1.5603e-01],
[-2.1344e-01, -9.0347e-02, 4.1147e-02],
[-2.4876e-01, -7.6765e-02, 1.4148e-01]],
...,
[[ 8.9547e-02, -9.1532e-02, -1.9001e-01],
[ 3.7244e-02, -2.0003e-01, -1.0590e-01],
[ 2.2178e-01, 1.3772e-01, -6.1669e-02]],
[[-1.9552e-01, -7.7376e-02, -1.7308e-01],
[-4.6343e-02, -1.5503e-01, -1.2731e-01],
[-1.0114e-01, -2.4989e-01, 1.8386e-01]],
[[ 4.0021e-02, -1.2553e-01, -1.8682e-01],
[-9.2273e-03, -1.2105e-01, -1.3929e-01],
[ 1.3424e-01, -8.1632e-03, 1.2522e-02]]],
[[[ 4.9459e-02, 5.7277e-02, -1.0958e-01],
[ 1.8008e-02, 1.8347e-02, 5.9727e-03],
[ 1.3724e-02, 1.3408e-02, 8.1452e-02]],
[[-3.8002e-02, -1.4002e-01, -2.0876e-01],
[-7.2546e-02, 1.5576e-02, 1.0262e-02],
[ 2.2784e-05, -1.6702e-03, 1.5322e-01]],
[[-6.5118e-02, 1.8329e-01, 3.3285e-02],
[-1.2403e-02, 7.2599e-02, -2.6558e-01],
[-3.7059e-01, -2.6436e-02, 1.4574e-01]],
...,
[[ 3.1124e-02, 1.1499e-01, -1.2980e-01],
[ 1.1193e-02, 4.3660e-02, 1.0134e-01],
[-5.5773e-02, 2.0391e-02, 1.8495e-01]],
[[-8.9290e-02, -2.5757e-01, -2.4744e-01],
[-5.0920e-02, -1.0168e-01, -1.1623e-02],
[-9.0944e-02, -2.0082e-01, -3.2128e-02]],
[[ 8.8311e-02, 7.7653e-02, -7.1598e-02],
[ 3.5708e-01, 1.4579e-01, 1.6296e-02],
[ 2.2355e-01, 1.4632e-01, 1.3167e-01]]]], device='cuda:0')),
('conv_layers.12.bias',
tensor([-3.9502e-02, 2.3040e-02, -8.0021e-02, -1.3592e-02, 1.2175e-04,
2.5499e-02, -5.6343e-02, -1.9519e-02, 7.5878e-02, -8.7756e-02,
2.8813e-02, 1.3735e-01, 1.7172e-01, -3.1268e-03, -2.6470e-03,
7.8613e-02, -1.1964e-01, -1.2649e-01, 2.3784e-01, -2.7362e-03,
4.5999e-02, -1.1683e-01, -6.4516e-02, -1.0553e-01, -1.4797e-02,
1.1175e-01, 2.9950e-02, -5.2819e-02, -2.4161e-02, -7.4739e-02,
-8.0514e-02, -4.6515e-02], device='cuda:0')),
('fc_layers.0.weight',
tensor([[ 0.0134, -0.0168, 0.1489, ..., 0.0080, -0.1296, -0.1599],
[-0.0392, -0.0129, 0.0162, ..., -0.0070, 0.0091, 0.0128],
[ 0.0989, 0.1119, 0.0658, ..., -0.2474, -0.1497, 0.1268],
...,
[ 0.0400, 0.0020, 0.0139, ..., 0.0194, -0.0852, -0.0795],
[-0.1046, -0.0453, -0.0477, ..., 0.0032, 0.0174, 0.1708],
[-0.0107, -0.0232, -0.0232, ..., 0.0019, 0.0250, 0.0641]],
device='cuda:0')),
('fc_layers.0.bias',
tensor([ 3.3737e-02, -6.3531e-02, 8.2412e-02, -4.8122e-02, -3.7942e-02,
-1.0233e-02, -1.3329e-01, -2.0446e-02, 1.6626e-01, -1.1495e-01,
-5.9927e-02, -6.0561e-02, 2.1325e-01, -3.8245e-02, 8.1060e-03,
-8.3573e-02, 1.3280e-02, -9.2221e-02, -4.2055e-02, -5.8392e-02,
-4.9213e-04, 2.1032e-01, 5.3515e-02, 4.4350e-03, 3.4158e-03,
-1.2594e-01, 1.0379e-01, 1.4527e-01, -6.8962e-02, -1.0548e-01,
2.0350e-02, -1.8688e-02, -7.5845e-03, 1.5687e-01, -1.9957e-02,
1.3485e-02, -7.1717e-02, -1.3712e-02, -1.1056e-01, -9.5263e-02,
-7.2991e-02, -7.0043e-02, -8.7112e-02, 5.4135e-04, 3.7090e-02,
-4.7928e-02, -9.0835e-03, 1.6565e-01, -1.6095e-03, 4.3089e-02,
-3.4354e-02, 1.9188e-01, -3.2153e-02, 3.8820e-02, -5.1198e-02,
-3.8640e-02, -1.0192e-02, 2.0344e-03, 5.8275e-02, -1.8172e-02,
-6.0892e-02, 1.4952e-02, -6.1340e-02, -8.1210e-03, -5.3202e-02,
-3.4612e-02, 6.7383e-02, -5.6015e-02, 1.2375e-02, -2.3155e-02,
-8.2325e-02, -5.7303e-02, -1.2783e-01, 1.3162e-02, 1.0232e-01,
-2.6725e-02, -8.8501e-02, 9.8558e-02, -2.3936e-02, -5.8733e-02,
9.0249e-02, 3.9059e-02, -6.8925e-02, -6.9687e-02, -2.1736e-02,
-9.0963e-03, 1.8455e-01, 6.0882e-02, 7.4398e-02, -4.9180e-02,
-1.9082e-03, -4.4387e-02, 1.0227e-01, -7.5234e-02, -2.9876e-02,
-5.0744e-02, -4.1421e-02, -2.2110e-02, 1.2090e-01, -5.4899e-02,
1.4753e-01, -6.1984e-02, -9.3792e-02, -9.1605e-03, 2.9590e-02,
-5.1517e-02, -5.2968e-02, -6.2034e-02, -4.5369e-02, -3.9578e-02,
-3.9806e-03, 2.8616e-03, -6.0046e-02, -5.5356e-03, -6.2036e-02,
-2.0452e-02, 2.3375e-02, 2.1804e-02, -6.2821e-02, -9.0789e-02,
-2.3709e-02, -3.8402e-02, -3.6805e-02, -7.1154e-03, -5.4095e-02,
-5.0098e-02, -1.3038e-02, -1.9683e-02, -3.4951e-02, 1.7124e-01,
7.8282e-03, -2.0733e-02, -6.0946e-02, 1.3737e-01, 2.9403e-01,
-5.5723e-02, -9.2122e-02, -2.7139e-02, -8.4328e-03, 1.6097e-01,
-2.5881e-02, 2.1426e-02, 1.0852e-02, -6.2416e-02, 5.9874e-03,
1.5238e-02, -5.2449e-02, -6.0418e-02, -4.9638e-02, -4.6189e-02,
-1.4336e-01, -5.0228e-02, 4.5250e-02, -2.3655e-02, 7.4587e-03,
1.2923e-01, 6.1328e-02, 5.7500e-02, 2.4194e-02, -3.2932e-02,
-1.0820e-02, 7.1242e-02, 5.6005e-02, -1.3878e-02, 2.8693e-01,
-5.2415e-02, -7.4341e-02, -1.4338e-02, -9.2830e-02, -7.6373e-03,
-5.4241e-03, -2.9904e-02, -1.9841e-02, 1.0855e-01, -8.1063e-02,
-3.7344e-02, -6.5324e-03, 1.3485e-01, 8.7682e-02, -2.4356e-02,
7.7916e-02, 1.2459e-01, -4.2520e-02, 5.5024e-02, -1.6802e-02,
-1.4720e-04, 2.0956e-02, -3.1374e-02, -1.0519e-01, -2.4640e-02,
-2.6931e-02, 1.4007e-01, 5.0989e-02, -4.5955e-02, -5.6816e-02,
-2.9890e-02, 7.2223e-02, -3.3948e-02, 1.8985e-02, -2.7125e-02,
-2.3935e-03, -2.8839e-02, -2.4901e-02, 4.0219e-02, 9.2962e-02,
9.5073e-02, -1.1031e-02, -6.2019e-02, -4.8016e-02, -1.0641e-01,
1.4060e-01, -1.1940e-02, -9.2820e-02, -1.1954e-01, -1.4386e-02,
-4.4997e-02, -1.7866e-02, -6.9411e-02, -5.5662e-02, 2.5875e-02,
-1.6177e-02, -2.9509e-02, -3.8361e-03, -1.0750e-02, -8.2689e-02,
-4.2440e-02, -6.7098e-02, -3.5123e-02, 4.8586e-03, 7.6470e-02,
-3.5329e-02, -7.1604e-03, 1.4326e-01, -5.2647e-02, 7.3201e-02,
1.5089e-02, -5.0674e-02, 1.0034e-01, 3.9439e-02, -2.4589e-02,
8.1575e-02, 7.8365e-04, -5.1379e-02, -5.6496e-02, -2.8328e-02,
-1.2267e-02, -5.8921e-03, 4.6208e-02, -3.5443e-02, -2.8610e-02,
1.1720e-01, 1.5015e-03, 4.7232e-02, -7.4842e-02, 1.7563e-01,
-6.1315e-02], device='cuda:0')),
('fc_layers.2.weight',
tensor([[-0.1582, -0.0426, 0.1531, ..., -0.0248, -0.0957, -0.0536],
[-0.0247, -0.0347, 0.2120, ..., -0.0812, -0.1250, -0.0350],
[-0.0780, -0.0226, -0.1972, ..., -0.0151, 0.2745, 0.0237],
...,
[-0.0719, -0.0108, -0.0455, ..., 0.0232, -0.1399, 0.0347],
[-0.0507, -0.0524, -0.0824, ..., 0.1039, -0.0394, 0.0593],
[ 0.3541, -0.0756, -0.1536, ..., 0.1519, -0.1378, -0.0454]],
device='cuda:0')),
('fc_layers.2.bias',
tensor([-0.1314, 0.0604, 0.2413, 0.1777, 0.0794, -0.1184, -0.0803, -0.1174,
0.0028, -0.0729], device='cuda:0'))])}]
(Deprecated) 3.2 Experiment 2: Epoch Number and Learning Rate¶
Define the candidate values for each controlled variable.
3.2.1 Design Hyper Parameters¶
Set the candidate hyper parameters.
# Grid of epoch counts to sweep over in Experiment 2.
candidate_epoch_num = [20, 40, 60, 80]
# Grid of Adam learning rates, log-spaced from 1e-3 down to 1e-6.
candidate_lr = [1e-3, 1e-4, 1e-5, 1e-6]
From the controlled variables, generate all possible experiment sets.
# Cartesian product of the two candidate grids: every (epoch, lr) pair.
combinations = list(itertools.product(candidate_epoch_num, candidate_lr))
# Preview the full sweep on a single line, e.g. "[20, 1e-03] [20, 1e-04] ...".
for epochs, lr in combinations:
    print(f"[{epochs}, {lr:.0e}]", end=" ")
[20, 1e-03] [20, 1e-04] [20, 1e-05] [20, 1e-06] [40, 1e-03] [40, 1e-04] [40, 1e-05] [40, 1e-06] [60, 1e-03] [60, 1e-04] [60, 1e-05] [60, 1e-06] [80, 1e-03] [80, 1e-04] [80, 1e-05] [80, 1e-06]
3.2.2 Train Models¶
Train the models for all the generated hyper-parameter combinations.
def run_experiment(candidate_epoch_num, candidate_lr):
    """Train one model per (epoch_num, lr) hyper-parameter combination.

    Args:
        candidate_epoch_num: iterable of epoch counts to try.
        candidate_lr: iterable of learning rates to try.

    Returns:
        A list of dicts, one per combination, each holding the
        hyper-parameters, the per-epoch train/test loss curves, and the
        trained model's state_dict (copied to CPU so that GPU memory is
        actually released between runs).
    """
    combinations = list(itertools.product(candidate_epoch_num, candidate_lr))
    experiments = []  # One result record per hyper-parameter combination.
    for num_epochs, lr in combinations:
        print(f"Performing Experiment: epoch_num={num_epochs}, lr={lr}")
        exp_model = SmallVGG().to(device)
        criterion = nn.CrossEntropyLoss()
        optimizer = optim.Adam(exp_model.parameters(), lr=lr)
        train_losses, test_losses = train_and_evaluate(
            exp_model, train_loader, test_loader, criterion, optimizer, num_epochs)
        # Detach and copy the weights to CPU before storing them: keeping
        # CUDA tensors alive inside `experiments` would pin GPU memory for
        # every completed run, defeating the empty_cache() call below.
        cpu_state = {k: v.detach().cpu() for k, v in exp_model.state_dict().items()}
        experiments.append({
            "num_epoch": num_epochs,
            "lr": lr,
            "train_losses": train_losses,
            "test_losses": test_losses,
            "model_state_dict": cpu_state
        })
        # Drop all GPU references so the next experiment starts from a
        # clean CUDA allocator state.
        del exp_model, criterion, optimizer
        torch.cuda.empty_cache()
    return experiments
# Run the full grid search and checkpoint every experiment to disk.
experiments = run_experiment(candidate_epoch_num, candidate_lr)
# Timestamp with the dot stripped makes the filename unique per run.
time_str = str(time.time()).replace(".","")
# Ensure the output directory exists — torch.save does not create it and
# would otherwise raise FileNotFoundError on a fresh checkout.
os.makedirs("./models", exist_ok=True)
torch.save(experiments, f"./models/experiments_{time_str}.pth")
Performing Experiment: epoch_num=20, lr=0.001
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.61it/s]
Epoch[1/20], Train Loss:273.6250, Test Loss:226.2518
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.61it/s]
Epoch[2/20], Train Loss:192.3438, Test Loss:165.3802
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 32.26it/s]
Epoch[3/20], Train Loss:158.9808, Test Loss:146.8151
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.58it/s]
Epoch[4/20], Train Loss:144.1323, Test Loss:133.9099
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 35.24it/s]
Epoch[5/20], Train Loss:135.7931, Test Loss:130.3998
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:15<00:00, 36.60it/s]
Epoch[6/20], Train Loss:129.6459, Test Loss:126.2093
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.39it/s]
Epoch[7/20], Train Loss:126.2750, Test Loss:119.4754
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.14it/s]
Epoch[8/20], Train Loss:122.8730, Test Loss:117.3750
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.43it/s]
Epoch[9/20], Train Loss:120.9373, Test Loss:117.7390
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 35.53it/s]
Epoch[10/20], Train Loss:119.1836, Test Loss:112.1904
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 35.45it/s]
Epoch[11/20], Train Loss:116.5669, Test Loss:112.7011
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.93it/s]
Epoch[12/20], Train Loss:114.7737, Test Loss:111.9802
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 35.17it/s]
Epoch[13/20], Train Loss:114.1235, Test Loss:111.2841
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:15<00:00, 35.96it/s]
Epoch[14/20], Train Loss:112.9559, Test Loss:114.3767
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 35.53it/s]
Epoch[15/20], Train Loss:111.0698, Test Loss:106.3295
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:15<00:00, 36.72it/s]
Epoch[16/20], Train Loss:112.0243, Test Loss:110.1766
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 31.91it/s]
Epoch[17/20], Train Loss:109.7080, Test Loss:107.9069
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.30it/s]
Epoch[18/20], Train Loss:108.5583, Test Loss:105.9967
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.30it/s]
Epoch[19/20], Train Loss:109.5603, Test Loss:103.4721
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.84it/s]
Epoch[20/20], Train Loss:108.3114, Test Loss:105.9967 Performing Experiment: epoch_num=20, lr=0.0001
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 35.12it/s]
Epoch[1/20], Train Loss:286.3824, Test Loss:282.5474
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.02it/s]
Epoch[2/20], Train Loss:277.8285, Test Loss:261.4198
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 31.92it/s]
Epoch[3/20], Train Loss:247.3757, Test Loss:232.7131
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 32.88it/s]
Epoch[4/20], Train Loss:229.2325, Test Loss:218.5004
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 32.60it/s]
Epoch[5/20], Train Loss:216.0349, Test Loss:208.2404
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.75it/s]
Epoch[6/20], Train Loss:205.9314, Test Loss:199.8663
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 35.76it/s]
Epoch[7/20], Train Loss:196.4219, Test Loss:189.1829
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:15<00:00, 36.33it/s]
Epoch[8/20], Train Loss:190.0487, Test Loss:181.5063
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.73it/s]
Epoch[9/20], Train Loss:183.7670, Test Loss:181.0961
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 35.63it/s]
Epoch[10/20], Train Loss:179.1436, Test Loss:171.9339
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:15<00:00, 36.32it/s]
Epoch[11/20], Train Loss:175.6870, Test Loss:169.1723
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.66it/s]
Epoch[12/20], Train Loss:171.8809, Test Loss:165.4287
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.74it/s]
Epoch[13/20], Train Loss:168.3930, Test Loss:164.0902
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.57it/s]
Epoch[14/20], Train Loss:165.6571, Test Loss:161.6520
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.82it/s]
Epoch[15/20], Train Loss:163.5155, Test Loss:156.4228
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:15<00:00, 35.94it/s]
Epoch[16/20], Train Loss:160.6063, Test Loss:153.5970
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.25it/s]
Epoch[17/20], Train Loss:157.8511, Test Loss:153.4596
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.77it/s]
Epoch[18/20], Train Loss:156.5984, Test Loss:148.7951
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 32.98it/s]
Epoch[19/20], Train Loss:152.8333, Test Loss:147.1627
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.40it/s]
Epoch[20/20], Train Loss:151.1984, Test Loss:146.9617 Performing Experiment: epoch_num=20, lr=1e-05
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:15<00:00, 36.56it/s]
Epoch[1/20], Train Loss:290.2158, Test Loss:284.0060
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.66it/s]
Epoch[2/20], Train Loss:286.0251, Test Loss:283.6935
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.18it/s]
Epoch[3/20], Train Loss:285.9170, Test Loss:283.6253
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 35.19it/s]
Epoch[4/20], Train Loss:285.8377, Test Loss:283.4673
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.47it/s]
Epoch[5/20], Train Loss:285.7950, Test Loss:283.5199
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 31.75it/s]
Epoch[6/20], Train Loss:285.6924, Test Loss:283.3490
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 31.17it/s]
Epoch[7/20], Train Loss:285.4898, Test Loss:282.9781
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.25it/s]
Epoch[8/20], Train Loss:285.0739, Test Loss:282.2394
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 31.59it/s]
Epoch[9/20], Train Loss:284.0615, Test Loss:280.2230
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.33it/s]
Epoch[10/20], Train Loss:281.5654, Test Loss:276.3494
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 32.42it/s]
Epoch[11/20], Train Loss:276.9703, Test Loss:271.2993
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 32.06it/s]
Epoch[12/20], Train Loss:271.3448, Test Loss:265.7829
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 32.49it/s]
Epoch[13/20], Train Loss:266.6732, Test Loss:263.1316
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.45it/s]
Epoch[14/20], Train Loss:263.4424, Test Loss:259.5436
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 30.00it/s]
Epoch[15/20], Train Loss:260.9711, Test Loss:258.2017
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 31.21it/s]
Epoch[16/20], Train Loss:259.3022, Test Loss:257.2764
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 33.99it/s]
Epoch[17/20], Train Loss:257.5728, Test Loss:256.4187
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.72it/s]
Epoch[18/20], Train Loss:256.3358, Test Loss:254.4685
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.39it/s]
Epoch[19/20], Train Loss:255.3257, Test Loss:254.6504
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.82it/s]
Epoch[20/20], Train Loss:254.0659, Test Loss:252.7717 Performing Experiment: epoch_num=20, lr=1e-06
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:15<00:00, 36.30it/s]
Epoch[1/20], Train Loss:295.6317, Test Loss:295.0451
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.28it/s]
Epoch[2/20], Train Loss:295.1768, Test Loss:294.5195
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.45it/s]
Epoch[3/20], Train Loss:294.6344, Test Loss:293.8320
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.40it/s]
Epoch[4/20], Train Loss:293.8402, Test Loss:292.7308
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:15<00:00, 37.26it/s]
Epoch[5/20], Train Loss:292.4189, Test Loss:290.4903
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:15<00:00, 36.90it/s]
Epoch[6/20], Train Loss:289.9625, Test Loss:287.3102
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:14<00:00, 38.37it/s]
Epoch[7/20], Train Loss:287.6297, Test Loss:285.1984
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.30it/s]
Epoch[8/20], Train Loss:286.6804, Test Loss:284.5516
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:15<00:00, 36.38it/s]
Epoch[9/20], Train Loss:286.3947, Test Loss:284.2665
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.45it/s]
Epoch[10/20], Train Loss:286.2675, Test Loss:284.1292
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.28it/s]
Epoch[11/20], Train Loss:286.2180, Test Loss:284.0686
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.41it/s]
Epoch[12/20], Train Loss:286.1664, Test Loss:284.0337
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:15<00:00, 36.34it/s]
Epoch[13/20], Train Loss:286.1440, Test Loss:283.9513
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.62it/s]
Epoch[14/20], Train Loss:286.1358, Test Loss:283.9522
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 35.24it/s]
Epoch[15/20], Train Loss:286.1174, Test Loss:283.9397
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.29it/s]
Epoch[16/20], Train Loss:286.1081, Test Loss:283.8998
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.53it/s]
Epoch[17/20], Train Loss:286.1045, Test Loss:283.9459
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:15<00:00, 37.80it/s]
Epoch[18/20], Train Loss:286.0845, Test Loss:283.8653
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 35.69it/s]
Epoch[19/20], Train Loss:286.0876, Test Loss:283.8984
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 35.78it/s]
Epoch[20/20], Train Loss:286.0620, Test Loss:283.8739 Performing Experiment: epoch_num=40, lr=0.001
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.06it/s]
Epoch[1/40], Train Loss:252.5546, Test Loss:198.3760
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.61it/s]
Epoch[2/40], Train Loss:176.1080, Test Loss:156.2775
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 33.80it/s]
Epoch[3/40], Train Loss:149.5769, Test Loss:138.1545
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.90it/s]
Epoch[4/40], Train Loss:137.5053, Test Loss:132.4482
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 35.51it/s]
Epoch[5/40], Train Loss:131.3698, Test Loss:126.0467
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:15<00:00, 36.67it/s]
Epoch[6/40], Train Loss:126.8446, Test Loss:117.8059
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:14<00:00, 39.98it/s]
Epoch[7/40], Train Loss:121.6506, Test Loss:117.4404
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:14<00:00, 40.86it/s]
Epoch[8/40], Train Loss:119.9389, Test Loss:112.2045
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:14<00:00, 40.04it/s]
Epoch[9/40], Train Loss:118.0162, Test Loss:113.9959
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:14<00:00, 40.56it/s]
Epoch[10/40], Train Loss:115.2518, Test Loss:110.4475
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:15<00:00, 37.24it/s]
Epoch[11/40], Train Loss:114.9973, Test Loss:109.8806
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 35.01it/s]
Epoch[12/40], Train Loss:112.9299, Test Loss:105.4914
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 32.99it/s]
Epoch[13/40], Train Loss:112.1285, Test Loss:108.4001
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.55it/s]
Epoch[14/40], Train Loss:111.8850, Test Loss:105.5100
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.85it/s]
Epoch[15/40], Train Loss:110.6797, Test Loss:107.2092
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.40it/s]
Epoch[16/40], Train Loss:109.7235, Test Loss:107.9702
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.58it/s]
Epoch[17/40], Train Loss:109.5188, Test Loss:103.6870
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.18it/s]
Epoch[18/40], Train Loss:108.2240, Test Loss:104.9807
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 33.85it/s]
Epoch[19/40], Train Loss:108.1700, Test Loss:103.0870
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 35.51it/s]
Epoch[20/40], Train Loss:107.6236, Test Loss:104.1072
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 33.91it/s]
Epoch[21/40], Train Loss:106.7039, Test Loss:103.1942
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 33.77it/s]
Epoch[22/40], Train Loss:106.2380, Test Loss:98.7930
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 35.24it/s]
Epoch[23/40], Train Loss:104.8741, Test Loss:103.6761
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:15<00:00, 36.42it/s]
Epoch[24/40], Train Loss:105.2382, Test Loss:101.0090
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 33.91it/s]
Epoch[25/40], Train Loss:106.1663, Test Loss:99.8675
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 33.83it/s]
Epoch[26/40], Train Loss:104.2536, Test Loss:100.7559
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.95it/s]
Epoch[27/40], Train Loss:103.6044, Test Loss:100.3231
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.33it/s]
Epoch[28/40], Train Loss:104.3160, Test Loss:98.0720
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.14it/s]
Epoch[29/40], Train Loss:103.6296, Test Loss:97.5014
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 33.99it/s]
Epoch[30/40], Train Loss:103.0167, Test Loss:98.2294
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.34it/s]
Epoch[31/40], Train Loss:102.3464, Test Loss:98.1610
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.34it/s]
Epoch[32/40], Train Loss:103.2416, Test Loss:95.4067
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.12it/s]
Epoch[33/40], Train Loss:102.9440, Test Loss:95.7281
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.08it/s]
Epoch[34/40], Train Loss:102.4315, Test Loss:97.5378
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.15it/s]
Epoch[35/40], Train Loss:102.5919, Test Loss:98.1286
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.45it/s]
Epoch[36/40], Train Loss:102.8453, Test Loss:96.1490
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.04it/s]
Epoch[37/40], Train Loss:102.1551, Test Loss:97.1761
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 33.74it/s]
Epoch[38/40], Train Loss:101.6684, Test Loss:96.0527
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:15<00:00, 36.76it/s]
Epoch[39/40], Train Loss:102.4429, Test Loss:97.5615
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.44it/s]
Epoch[40/40], Train Loss:100.7082, Test Loss:97.2595 Performing Experiment: epoch_num=40, lr=0.0001
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 35.29it/s]
Epoch[1/40], Train Loss:286.4365, Test Loss:283.3193
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.73it/s]
Epoch[2/40], Train Loss:276.2484, Test Loss:259.4074
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.41it/s]
Epoch[3/40], Train Loss:250.3338, Test Loss:243.5947
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 35.72it/s]
Epoch[4/40], Train Loss:235.6773, Test Loss:225.8585
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.89it/s]
Epoch[5/40], Train Loss:222.8683, Test Loss:219.1508
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.06it/s]
Epoch[6/40], Train Loss:213.6142, Test Loss:209.4114
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.10it/s]
Epoch[7/40], Train Loss:207.3052, Test Loss:200.4252
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:15<00:00, 35.91it/s]
Epoch[8/40], Train Loss:201.5742, Test Loss:194.0860
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:15<00:00, 38.09it/s]
Epoch[9/40], Train Loss:193.8038, Test Loss:187.6225
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:15<00:00, 37.33it/s]
Epoch[10/40], Train Loss:187.8839, Test Loss:183.8923
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:14<00:00, 38.48it/s]
Epoch[11/40], Train Loss:182.7916, Test Loss:175.6935
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:15<00:00, 37.02it/s]
Epoch[12/40], Train Loss:177.5759, Test Loss:171.8337
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:15<00:00, 36.65it/s]
Epoch[13/40], Train Loss:173.9070, Test Loss:167.4274
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 35.66it/s]
Epoch[14/40], Train Loss:170.8607, Test Loss:164.6920
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 34.00it/s]
Epoch[15/40], Train Loss:166.8828, Test Loss:160.3560
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:15<00:00, 38.12it/s]
Epoch[16/40], Train Loss:163.6345, Test Loss:157.9022
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:16<00:00, 35.01it/s]
Epoch[17/40], Train Loss:161.3861, Test Loss:155.4753
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:15<00:00, 36.99it/s]
Epoch[18/40], Train Loss:158.7531, Test Loss:154.7545
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 32.28it/s]
Epoch[19/40], Train Loss:156.2212, Test Loss:152.9082
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.73it/s]
Epoch[20/40], Train Loss:153.3404, Test Loss:149.8849
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.84it/s]
Epoch[21/40], Train Loss:153.0120, Test Loss:147.6294
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 30.08it/s]
Epoch[22/40], Train Loss:150.2570, Test Loss:144.7271
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.18it/s]
Epoch[23/40], Train Loss:147.8549, Test Loss:145.0822
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.27it/s]
Epoch[24/40], Train Loss:145.9972, Test Loss:142.5783
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.35it/s]
Epoch[25/40], Train Loss:144.6299, Test Loss:139.4967
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.72it/s]
Epoch[26/40], Train Loss:143.4625, Test Loss:136.7219
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.34it/s]
Epoch[27/40], Train Loss:141.9160, Test Loss:137.0666
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 32.02it/s]
Epoch[28/40], Train Loss:141.0530, Test Loss:135.1106
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 32.68it/s]
Epoch[29/40], Train Loss:139.5733, Test Loss:133.7263
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 32.85it/s]
Epoch[30/40], Train Loss:138.3087, Test Loss:132.2758
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.74it/s]
Epoch[31/40], Train Loss:136.7301, Test Loss:131.2139
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 28.81it/s]
Epoch[32/40], Train Loss:135.3149, Test Loss:133.6783
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.25it/s]
Epoch[33/40], Train Loss:134.6403, Test Loss:129.7076
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.39it/s]
Epoch[34/40], Train Loss:133.5803, Test Loss:128.8422
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.02it/s]
Epoch[35/40], Train Loss:132.4256, Test Loss:127.5469
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.56it/s]
Epoch[36/40], Train Loss:131.2303, Test Loss:129.9772
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.27it/s]
Epoch[37/40], Train Loss:130.1258, Test Loss:126.9177
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.38it/s]
Epoch[38/40], Train Loss:130.0177, Test Loss:125.8382
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.37it/s]
Epoch[39/40], Train Loss:129.3370, Test Loss:123.4188
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.52it/s]
Epoch[40/40], Train Loss:127.8257, Test Loss:124.4013 Performing Experiment: epoch_num=40, lr=1e-05
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.56it/s]
Epoch[1/40], Train Loss:289.9424, Test Loss:283.8720
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.59it/s]
Epoch[2/40], Train Loss:286.1301, Test Loss:283.6979
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 30.08it/s]
Epoch[3/40], Train Loss:285.9707, Test Loss:283.6336
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.24it/s]
Epoch[4/40], Train Loss:285.8522, Test Loss:283.5098
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.15it/s]
Epoch[5/40], Train Loss:285.7708, Test Loss:283.5449
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.16it/s]
Epoch[6/40], Train Loss:285.6705, Test Loss:283.3643
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.83it/s]
Epoch[7/40], Train Loss:285.5570, Test Loss:283.1740
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 30.14it/s]
Epoch[8/40], Train Loss:285.3613, Test Loss:282.7917
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.55it/s]
Epoch[9/40], Train Loss:285.0162, Test Loss:282.5598
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.41it/s]
Epoch[10/40], Train Loss:284.4857, Test Loss:281.9348
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 30.11it/s]
Epoch[11/40], Train Loss:283.4750, Test Loss:280.3187
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.66it/s]
Epoch[12/40], Train Loss:281.8663, Test Loss:277.9629
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 30.12it/s]
Epoch[13/40], Train Loss:279.2368, Test Loss:274.5834
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.23it/s]
Epoch[14/40], Train Loss:275.5871, Test Loss:270.7961
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.18it/s]
Epoch[15/40], Train Loss:271.8243, Test Loss:266.3494
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.38it/s]
Epoch[16/40], Train Loss:268.5173, Test Loss:264.4293
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.66it/s]
Epoch[17/40], Train Loss:265.9794, Test Loss:262.5643
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.44it/s]
Epoch[18/40], Train Loss:264.1683, Test Loss:261.2106
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.95it/s]
Epoch[19/40], Train Loss:262.5633, Test Loss:260.0413
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 31.11it/s]
Epoch[20/40], Train Loss:261.2260, Test Loss:258.6385
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.89it/s]
Epoch[21/40], Train Loss:259.9555, Test Loss:257.4064
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 31.76it/s]
Epoch[22/40], Train Loss:258.4210, Test Loss:255.9947
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.24it/s]
Epoch[23/40], Train Loss:257.6317, Test Loss:254.6396
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.51it/s]
Epoch[24/40], Train Loss:255.8194, Test Loss:254.6260
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.56it/s]
Epoch[25/40], Train Loss:254.7669, Test Loss:252.9773
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 28.86it/s]
Epoch[26/40], Train Loss:253.7477, Test Loss:252.0643
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.24it/s]
Epoch[27/40], Train Loss:253.3600, Test Loss:251.0859
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.47it/s]
Epoch[28/40], Train Loss:251.4466, Test Loss:250.2249
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 28.78it/s]
Epoch[29/40], Train Loss:250.7752, Test Loss:247.9011
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.77it/s]
Epoch[30/40], Train Loss:250.0992, Test Loss:249.3162
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.74it/s]
Epoch[31/40], Train Loss:248.6201, Test Loss:246.7114
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.21it/s]
Epoch[32/40], Train Loss:247.1963, Test Loss:245.5311
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.61it/s]
Epoch[33/40], Train Loss:245.8526, Test Loss:244.0514
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.63it/s]
Epoch[34/40], Train Loss:244.6200, Test Loss:242.6640
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.13it/s]
Epoch[35/40], Train Loss:243.8005, Test Loss:243.5267
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.43it/s]
Epoch[36/40], Train Loss:243.3290, Test Loss:241.8712
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.17it/s]
Epoch[37/40], Train Loss:241.4309, Test Loss:239.2189
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.25it/s]
Epoch[38/40], Train Loss:240.5895, Test Loss:237.7867
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.48it/s]
Epoch[39/40], Train Loss:239.7811, Test Loss:238.6575
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.59it/s]
Epoch[40/40], Train Loss:238.1266, Test Loss:236.9292 Performing Experiment: epoch_num=40, lr=1e-06
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.13it/s]
Epoch[1/40], Train Loss:294.1970, Test Loss:293.3686
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.51it/s]
Epoch[2/40], Train Loss:293.6697, Test Loss:292.6483
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.98it/s]
Epoch[3/40], Train Loss:292.8289, Test Loss:291.3830
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.69it/s]
Epoch[4/40], Train Loss:291.2424, Test Loss:288.8480
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.13it/s]
Epoch[5/40], Train Loss:288.6997, Test Loss:285.7693
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.23it/s]
Epoch[6/40], Train Loss:286.8086, Test Loss:284.4054
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.41it/s]
Epoch[7/40], Train Loss:286.2995, Test Loss:284.1282
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.60it/s]
Epoch[8/40], Train Loss:286.1978, Test Loss:284.0558
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 31.47it/s]
Epoch[9/40], Train Loss:286.1487, Test Loss:284.0121
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.89it/s]
Epoch[10/40], Train Loss:286.1266, Test Loss:283.9756
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.49it/s]
Epoch[11/40], Train Loss:286.0823, Test Loss:283.9181
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 30.04it/s]
Epoch[12/40], Train Loss:286.0681, Test Loss:283.9073
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 32.10it/s]
Epoch[13/40], Train Loss:286.0454, Test Loss:283.9018
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 31.38it/s]
Epoch[14/40], Train Loss:286.0397, Test Loss:283.8455
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.60it/s]
Epoch[15/40], Train Loss:286.0335, Test Loss:283.8679
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 30.15it/s]
Epoch[16/40], Train Loss:286.0137, Test Loss:283.8637
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 31.13it/s]
Epoch[17/40], Train Loss:286.0131, Test Loss:283.8414
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 30.08it/s]
Epoch[18/40], Train Loss:285.9974, Test Loss:283.7944
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.86it/s]
Epoch[19/40], Train Loss:285.9874, Test Loss:283.8000
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.37it/s]
Epoch[20/40], Train Loss:285.9849, Test Loss:283.8054
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.12it/s]
Epoch[21/40], Train Loss:285.9937, Test Loss:283.7820
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.63it/s]
Epoch[22/40], Train Loss:285.9757, Test Loss:283.7979
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.26it/s]
Epoch[23/40], Train Loss:285.9643, Test Loss:283.7493
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.85it/s]
Epoch[24/40], Train Loss:285.9544, Test Loss:283.7730
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.33it/s]
Epoch[25/40], Train Loss:285.9523, Test Loss:283.7589
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.79it/s]
Epoch[26/40], Train Loss:285.9429, Test Loss:283.7508
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.41it/s]
Epoch[27/40], Train Loss:285.9523, Test Loss:283.7146
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.57it/s]
Epoch[28/40], Train Loss:285.9319, Test Loss:283.7250
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.60it/s]
Epoch[29/40], Train Loss:285.9263, Test Loss:283.7215
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.27it/s]
Epoch[30/40], Train Loss:285.9174, Test Loss:283.7334
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.62it/s]
Epoch[31/40], Train Loss:285.9163, Test Loss:283.6971
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.29it/s]
Epoch[32/40], Train Loss:285.9020, Test Loss:283.6989
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.25it/s]
Epoch[33/40], Train Loss:285.9120, Test Loss:283.7106
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.64it/s]
Epoch[34/40], Train Loss:285.8898, Test Loss:283.6811
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.59it/s]
Epoch[35/40], Train Loss:285.8885, Test Loss:283.6662
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.25it/s]
Epoch[36/40], Train Loss:285.8890, Test Loss:283.7031
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 28.98it/s]
Epoch[37/40], Train Loss:285.8862, Test Loss:283.6527
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.61it/s]
Epoch[38/40], Train Loss:285.8697, Test Loss:283.6671
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.35it/s]
Epoch[39/40], Train Loss:285.8675, Test Loss:283.6427
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.43it/s]
Epoch[40/40], Train Loss:285.8620, Test Loss:283.6609 Performing Experiment: epoch_num=60, lr=0.001
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.28it/s]
Epoch[1/60], Train Loss:279.5896, Test Loss:237.0638
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.22it/s]
Epoch[2/60], Train Loss:197.1944, Test Loss:168.9421
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 28.81it/s]
Epoch[3/60], Train Loss:159.7089, Test Loss:147.3067
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 30.04it/s]
Epoch[4/60], Train Loss:144.7584, Test Loss:140.0285
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.42it/s]
Epoch[5/60], Train Loss:136.5747, Test Loss:128.7315
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.37it/s]
Epoch[6/60], Train Loss:129.9023, Test Loss:122.8786
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.62it/s]
Epoch[7/60], Train Loss:126.9876, Test Loss:123.6097
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.32it/s]
Epoch[8/60], Train Loss:124.6172, Test Loss:121.0266
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.84it/s]
Epoch[9/60], Train Loss:122.0433, Test Loss:115.3081
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.63it/s]
Epoch[10/60], Train Loss:120.2397, Test Loss:119.1607
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 30.09it/s]
Epoch[11/60], Train Loss:117.8647, Test Loss:114.1192
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.29it/s]
Epoch[12/60], Train Loss:117.3268, Test Loss:112.4019
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.22it/s]
Epoch[13/60], Train Loss:115.2268, Test Loss:111.7822
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.61it/s]
Epoch[14/60], Train Loss:114.1809, Test Loss:108.4389
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.52it/s]
Epoch[15/60], Train Loss:112.9903, Test Loss:108.5978
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.78it/s]
Epoch[16/60], Train Loss:111.7912, Test Loss:108.5702
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 28.91it/s]
Epoch[17/60], Train Loss:111.0534, Test Loss:103.5053
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.54it/s]
Epoch[18/60], Train Loss:110.6719, Test Loss:107.6488
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.51it/s]
Epoch[19/60], Train Loss:109.3007, Test Loss:105.9243
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 28.75it/s]
Epoch[20/60], Train Loss:108.5398, Test Loss:105.7840
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.85it/s]
Epoch[21/60], Train Loss:108.6995, Test Loss:106.1354
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 28.91it/s]
Epoch[22/60], Train Loss:107.9198, Test Loss:102.4998
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.96it/s]
Epoch[23/60], Train Loss:107.6220, Test Loss:102.1166
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.31it/s]
Epoch[24/60], Train Loss:107.5716, Test Loss:100.9168
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.88it/s]
Epoch[25/60], Train Loss:107.7587, Test Loss:106.3693
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.97it/s]
Epoch[26/60], Train Loss:106.7119, Test Loss:101.6466
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.66it/s]
Epoch[27/60], Train Loss:105.7480, Test Loss:104.4696
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.43it/s]
Epoch[28/60], Train Loss:105.2937, Test Loss:101.5035
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.55it/s]
Epoch[29/60], Train Loss:105.4270, Test Loss:103.1475
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 30.10it/s]
Epoch[30/60], Train Loss:105.5109, Test Loss:102.0397
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.72it/s]
Epoch[31/60], Train Loss:105.4420, Test Loss:101.6189
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 28.94it/s]
Epoch[32/60], Train Loss:105.6057, Test Loss:98.3133
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.39it/s]
Epoch[33/60], Train Loss:104.6385, Test Loss:99.8820
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.92it/s]
Epoch[34/60], Train Loss:103.2397, Test Loss:99.0649
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.76it/s]
Epoch[35/60], Train Loss:102.8302, Test Loss:98.0512
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 31.20it/s]
Epoch[36/60], Train Loss:103.5704, Test Loss:100.0085
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.69it/s]
Epoch[37/60], Train Loss:102.4139, Test Loss:99.3036
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.42it/s]
Epoch[38/60], Train Loss:103.8804, Test Loss:97.6521
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.67it/s]
Epoch[39/60], Train Loss:102.7656, Test Loss:97.9623
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.96it/s]
Epoch[40/60], Train Loss:102.5889, Test Loss:96.6777
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.73it/s]
Epoch[41/60], Train Loss:102.8004, Test Loss:95.8969
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.77it/s]
Epoch[42/60], Train Loss:102.8066, Test Loss:97.5705
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 28.78it/s]
Epoch[43/60], Train Loss:101.6757, Test Loss:97.2087
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.19it/s]
Epoch[44/60], Train Loss:102.3364, Test Loss:98.8588
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 28.92it/s]
Epoch[45/60], Train Loss:101.4713, Test Loss:97.5596
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 32.56it/s]
Epoch[46/60], Train Loss:101.0782, Test Loss:96.6383
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.32it/s]
Epoch[47/60], Train Loss:101.5841, Test Loss:96.7073
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.64it/s]
Epoch[48/60], Train Loss:101.0128, Test Loss:95.6835
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.15it/s]
Epoch[49/60], Train Loss:101.5009, Test Loss:95.9767
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.30it/s]
Epoch[50/60], Train Loss:100.5052, Test Loss:95.4721
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.36it/s]
Epoch[51/60], Train Loss:100.6094, Test Loss:94.8461
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.47it/s]
Epoch[52/60], Train Loss:100.4600, Test Loss:98.5045
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 28.83it/s]
Epoch[53/60], Train Loss:100.0857, Test Loss:94.7737
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.89it/s]
Epoch[54/60], Train Loss:99.8005, Test Loss:97.4381
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.90it/s]
Epoch[55/60], Train Loss:100.1032, Test Loss:94.8192
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 30.04it/s]
Epoch[56/60], Train Loss:100.6854, Test Loss:94.3062
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.70it/s]
Epoch[57/60], Train Loss:100.1097, Test Loss:99.4643
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.41it/s]
Epoch[58/60], Train Loss:99.5613, Test Loss:95.4228
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.75it/s]
Epoch[59/60], Train Loss:99.2754, Test Loss:93.5620
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.73it/s]
Epoch[60/60], Train Loss:100.1393, Test Loss:95.1145 Performing Experiment: epoch_num=60, lr=0.0001
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.08it/s]
Epoch[1/60], Train Loss:286.5047, Test Loss:283.3940
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.97it/s]
Epoch[2/60], Train Loss:282.4395, Test Loss:266.5137
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.79it/s]
Epoch[3/60], Train Loss:256.0352, Test Loss:241.4241
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.23it/s]
Epoch[4/60], Train Loss:233.7830, Test Loss:228.2312
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.43it/s]
Epoch[5/60], Train Loss:221.6850, Test Loss:214.7887
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.31it/s]
Epoch[6/60], Train Loss:212.7405, Test Loss:208.6650
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.33it/s]
Epoch[7/60], Train Loss:204.8232, Test Loss:195.5997
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.33it/s]
Epoch[8/60], Train Loss:197.7622, Test Loss:190.4414
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.34it/s]
Epoch[9/60], Train Loss:191.6997, Test Loss:183.5122
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 30.09it/s]
Epoch[10/60], Train Loss:185.9060, Test Loss:178.7616
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.15it/s]
Epoch[11/60], Train Loss:181.0036, Test Loss:176.4023
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.51it/s]
Epoch[12/60], Train Loss:177.7134, Test Loss:172.3654
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.21it/s]
Epoch[13/60], Train Loss:173.5096, Test Loss:168.0623
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.03it/s]
Epoch[14/60], Train Loss:170.6168, Test Loss:167.6542
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.24it/s]
Epoch[15/60], Train Loss:167.0332, Test Loss:164.2903
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.57it/s]
Epoch[16/60], Train Loss:164.0549, Test Loss:156.0402
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.14it/s]
Epoch[17/60], Train Loss:161.4819, Test Loss:157.1026
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.36it/s]
Epoch[18/60], Train Loss:157.9669, Test Loss:156.7541
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.19it/s]
Epoch[19/60], Train Loss:157.3066, Test Loss:152.3425
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.67it/s]
Epoch[20/60], Train Loss:154.4249, Test Loss:151.4452
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.61it/s]
Epoch[21/60], Train Loss:153.4310, Test Loss:146.7105
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.51it/s]
Epoch[22/60], Train Loss:150.8847, Test Loss:146.1096
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.29it/s]
Epoch[23/60], Train Loss:150.0695, Test Loss:144.2308
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.62it/s]
Epoch[24/60], Train Loss:146.9812, Test Loss:143.4688
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.97it/s]
Epoch[25/60], Train Loss:146.7492, Test Loss:140.8589
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.33it/s]
Epoch[26/60], Train Loss:145.4735, Test Loss:138.5535
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 30.10it/s]
Epoch[27/60], Train Loss:142.7452, Test Loss:138.4652
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 30.00it/s]
Epoch[28/60], Train Loss:141.6260, Test Loss:135.1474
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.07it/s]
Epoch[29/60], Train Loss:140.7780, Test Loss:135.9733
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.95it/s]
Epoch[30/60], Train Loss:139.3273, Test Loss:132.7520
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.80it/s]
Epoch[31/60], Train Loss:139.3704, Test Loss:133.0283
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 30.05it/s]
Epoch[32/60], Train Loss:137.4609, Test Loss:131.9358
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.74it/s]
Epoch[33/60], Train Loss:136.9889, Test Loss:133.1886
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.47it/s]
Epoch[34/60], Train Loss:135.9388, Test Loss:131.3652
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.57it/s]
Epoch[35/60], Train Loss:135.3389, Test Loss:131.6194
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.84it/s]
Epoch[36/60], Train Loss:133.6476, Test Loss:129.4589
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.87it/s]
Epoch[37/60], Train Loss:132.8300, Test Loss:131.1267
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.75it/s]
Epoch[38/60], Train Loss:133.4121, Test Loss:127.9512
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.79it/s]
Epoch[39/60], Train Loss:131.8632, Test Loss:127.1084
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.58it/s]
Epoch[40/60], Train Loss:130.0388, Test Loss:124.7922
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.65it/s]
Epoch[41/60], Train Loss:130.2393, Test Loss:125.1812
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.35it/s]
Epoch[42/60], Train Loss:129.4709, Test Loss:124.1794
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.91it/s]
Epoch[43/60], Train Loss:128.6651, Test Loss:124.6473
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 31.17it/s]
Epoch[44/60], Train Loss:127.9359, Test Loss:126.4506
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.80it/s]
Epoch[45/60], Train Loss:127.7065, Test Loss:123.1838
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 31.43it/s]
Epoch[46/60], Train Loss:127.3556, Test Loss:121.2169
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 31.07it/s]
Epoch[47/60], Train Loss:126.4203, Test Loss:122.3986
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.46it/s]
Epoch[48/60], Train Loss:126.3399, Test Loss:122.5647
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 30.04it/s]
Epoch[49/60], Train Loss:125.0870, Test Loss:121.2306
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.52it/s]
Epoch[50/60], Train Loss:124.0804, Test Loss:120.6414
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.22it/s]
Epoch[51/60], Train Loss:124.0359, Test Loss:119.9407
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.80it/s]
Epoch[52/60], Train Loss:124.2094, Test Loss:121.1961
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.21it/s]
Epoch[53/60], Train Loss:123.0214, Test Loss:119.1811
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.20it/s]
Epoch[54/60], Train Loss:122.8831, Test Loss:117.3293
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.71it/s]
Epoch[55/60], Train Loss:122.9344, Test Loss:117.4565
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.48it/s]
Epoch[56/60], Train Loss:121.6462, Test Loss:119.8136
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.34it/s]
Epoch[57/60], Train Loss:120.6134, Test Loss:117.9638
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 30.01it/s]
Epoch[58/60], Train Loss:120.6958, Test Loss:116.2862
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.17it/s]
Epoch[59/60], Train Loss:119.5163, Test Loss:115.1409
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.64it/s]
Epoch[60/60], Train Loss:119.8725, Test Loss:114.7411 Performing Experiment: epoch_num=60, lr=1e-05
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 30.15it/s]
Epoch[1/60], Train Loss:289.1080, Test Loss:283.9554
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.54it/s]
Epoch[2/60], Train Loss:286.0416, Test Loss:283.8020
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 31.32it/s]
Epoch[3/60], Train Loss:285.9524, Test Loss:283.7146
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 32.42it/s]
Epoch[4/60], Train Loss:285.8713, Test Loss:283.5757
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.31it/s]
Epoch[5/60], Train Loss:285.7892, Test Loss:283.4460
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.56it/s]
Epoch[6/60], Train Loss:285.7000, Test Loss:283.3627
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 31.39it/s]
Epoch[7/60], Train Loss:285.5042, Test Loss:283.0507
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.70it/s]
Epoch[8/60], Train Loss:285.2221, Test Loss:282.6372
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 31.14it/s]
Epoch[9/60], Train Loss:284.6510, Test Loss:281.5582
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 31.06it/s]
Epoch[10/60], Train Loss:283.6569, Test Loss:280.1372
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.25it/s]
Epoch[11/60], Train Loss:282.2896, Test Loss:278.8133
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.43it/s]
Epoch[12/60], Train Loss:280.5857, Test Loss:276.8093
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.47it/s]
Epoch[13/60], Train Loss:278.6745, Test Loss:275.0183
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.73it/s]
Epoch[14/60], Train Loss:276.6650, Test Loss:273.0095
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.66it/s]
Epoch[15/60], Train Loss:274.7768, Test Loss:271.0455
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 31.19it/s]
Epoch[16/60], Train Loss:272.5207, Test Loss:268.7311
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.88it/s]
Epoch[17/60], Train Loss:270.4622, Test Loss:266.9376
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.75it/s]
Epoch[18/60], Train Loss:268.0717, Test Loss:265.5712
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.51it/s]
Epoch[19/60], Train Loss:266.3763, Test Loss:262.6938
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.54it/s]
Epoch[20/60], Train Loss:264.1425, Test Loss:261.8564
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.59it/s]
Epoch[21/60], Train Loss:263.0534, Test Loss:259.9804
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.65it/s]
Epoch[22/60], Train Loss:261.1938, Test Loss:257.2965
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.91it/s]
Epoch[23/60], Train Loss:259.5084, Test Loss:255.6820
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.81it/s]
Epoch[24/60], Train Loss:257.9390, Test Loss:255.2046
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.78it/s]
Epoch[25/60], Train Loss:256.1297, Test Loss:254.4101
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.53it/s]
Epoch[26/60], Train Loss:254.6991, Test Loss:250.7693
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.45it/s]
Epoch[27/60], Train Loss:253.3018, Test Loss:250.6309
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 30.08it/s]
Epoch[28/60], Train Loss:252.4840, Test Loss:248.8204
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.74it/s]
Epoch[29/60], Train Loss:250.7187, Test Loss:247.6244
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.87it/s]
Epoch[30/60], Train Loss:250.4660, Test Loss:247.4309
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 30.06it/s]
Epoch[31/60], Train Loss:248.7528, Test Loss:244.7443
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.95it/s]
Epoch[32/60], Train Loss:248.0600, Test Loss:244.1919
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.95it/s]
Epoch[33/60], Train Loss:246.7866, Test Loss:244.1057
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.72it/s]
Epoch[34/60], Train Loss:245.8569, Test Loss:241.9872
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 31.45it/s]
Epoch[35/60], Train Loss:244.8289, Test Loss:241.2885
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 32.73it/s]
Epoch[36/60], Train Loss:244.0705, Test Loss:240.6364
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 31.10it/s]
Epoch[37/60], Train Loss:243.1802, Test Loss:239.3293
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.97it/s]
Epoch[38/60], Train Loss:241.9260, Test Loss:238.7897
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.66it/s]
Epoch[39/60], Train Loss:241.4913, Test Loss:237.2577
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.86it/s]
Epoch[40/60], Train Loss:241.0558, Test Loss:236.8197
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.21it/s]
Epoch[41/60], Train Loss:239.9224, Test Loss:236.7752
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.92it/s]
Epoch[42/60], Train Loss:239.5047, Test Loss:237.5799
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.23it/s]
Epoch[43/60], Train Loss:238.8140, Test Loss:233.6011
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.81it/s]
Epoch[44/60], Train Loss:237.6730, Test Loss:234.6586
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.70it/s]
Epoch[45/60], Train Loss:237.1543, Test Loss:233.1263
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 31.11it/s]
Epoch[46/60], Train Loss:236.8549, Test Loss:233.8907
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 31.55it/s]
Epoch[47/60], Train Loss:235.3020, Test Loss:232.8559
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 31.59it/s]
Epoch[48/60], Train Loss:234.9974, Test Loss:232.2185
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.96it/s]
Epoch[49/60], Train Loss:233.9422, Test Loss:233.1751
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.80it/s]
Epoch[50/60], Train Loss:232.8943, Test Loss:230.3796
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.70it/s]
Epoch[51/60], Train Loss:233.5106, Test Loss:229.3095
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.22it/s]
Epoch[52/60], Train Loss:232.5923, Test Loss:230.4238
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 30.10it/s]
Epoch[53/60], Train Loss:231.9872, Test Loss:229.6225
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.98it/s]
Epoch[54/60], Train Loss:231.1767, Test Loss:227.1584
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.39it/s]
Epoch[55/60], Train Loss:231.0475, Test Loss:226.4383
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.63it/s]
Epoch[56/60], Train Loss:230.4636, Test Loss:226.4650
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.22it/s]
Epoch[57/60], Train Loss:229.1887, Test Loss:226.7338
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.25it/s]
Epoch[58/60], Train Loss:229.1885, Test Loss:226.9738
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.88it/s]
Epoch[59/60], Train Loss:228.7466, Test Loss:225.3420
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.53it/s]
Epoch[60/60], Train Loss:228.0630, Test Loss:224.5336 Performing Experiment: epoch_num=60, lr=1e-06
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.28it/s]
Epoch[1/60], Train Loss:295.0391, Test Loss:294.4319
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.47it/s]
Epoch[2/60], Train Loss:294.7245, Test Loss:294.0404
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.68it/s]
Epoch[3/60], Train Loss:294.2770, Test Loss:293.3631
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.71it/s]
Epoch[4/60], Train Loss:293.4329, Test Loss:292.0000
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.34it/s]
Epoch[5/60], Train Loss:291.8200, Test Loss:289.4613
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 31.91it/s]
Epoch[6/60], Train Loss:289.3437, Test Loss:286.3957
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 32.28it/s]
Epoch[7/60], Train Loss:287.4774, Test Loss:284.9734
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 31.88it/s]
Epoch[8/60], Train Loss:286.8712, Test Loss:284.5729
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 32.30it/s]
Epoch[9/60], Train Loss:286.6770, Test Loss:284.3703
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.57it/s]
Epoch[10/60], Train Loss:286.5050, Test Loss:284.2292
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.87it/s]
Epoch[11/60], Train Loss:286.4444, Test Loss:284.1767
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 32.34it/s]
Epoch[12/60], Train Loss:286.3753, Test Loss:284.1195
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 32.26it/s]
Epoch[13/60], Train Loss:286.3474, Test Loss:284.0470
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 31.10it/s]
Epoch[14/60], Train Loss:286.3091, Test Loss:284.0245
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.42it/s]
Epoch[15/60], Train Loss:286.2968, Test Loss:284.0124
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.20it/s]
Epoch[16/60], Train Loss:286.2755, Test Loss:284.0186
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.16it/s]
Epoch[17/60], Train Loss:286.2780, Test Loss:284.0081
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.89it/s]
Epoch[18/60], Train Loss:286.2309, Test Loss:283.9666
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.54it/s]
Epoch[19/60], Train Loss:286.2339, Test Loss:283.9461
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 31.57it/s]
Epoch[20/60], Train Loss:286.2114, Test Loss:283.9230
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.63it/s]
Epoch[21/60], Train Loss:286.1796, Test Loss:283.9001
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 31.49it/s]
Epoch[22/60], Train Loss:286.1940, Test Loss:283.8806
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 31.74it/s]
Epoch[23/60], Train Loss:286.1698, Test Loss:283.9376
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.64it/s]
Epoch[24/60], Train Loss:286.1570, Test Loss:283.8574
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.63it/s]
Epoch[25/60], Train Loss:286.1480, Test Loss:283.8891
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.43it/s]
Epoch[26/60], Train Loss:286.1433, Test Loss:283.9051
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.97it/s]
Epoch[27/60], Train Loss:286.1579, Test Loss:283.8451
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.62it/s]
Epoch[28/60], Train Loss:286.1318, Test Loss:283.8354
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.66it/s]
Epoch[29/60], Train Loss:286.1396, Test Loss:283.8437
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.45it/s]
Epoch[30/60], Train Loss:286.1181, Test Loss:283.8334
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.96it/s]
Epoch[31/60], Train Loss:286.1079, Test Loss:283.8118
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.80it/s]
Epoch[32/60], Train Loss:286.1122, Test Loss:283.8411
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.14it/s]
Epoch[33/60], Train Loss:286.0887, Test Loss:283.8231
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.33it/s]
Epoch[34/60], Train Loss:286.0825, Test Loss:283.7788
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.50it/s]
Epoch[35/60], Train Loss:286.0861, Test Loss:283.7965
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.38it/s]
Epoch[36/60], Train Loss:286.0642, Test Loss:283.7744
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.30it/s]
Epoch[37/60], Train Loss:286.0622, Test Loss:283.7709
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.79it/s]
Epoch[38/60], Train Loss:286.0374, Test Loss:283.7576
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 28.94it/s]
Epoch[39/60], Train Loss:286.0421, Test Loss:283.7618
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 30.10it/s]
Epoch[40/60], Train Loss:286.0271, Test Loss:283.7422
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.97it/s]
Epoch[41/60], Train Loss:286.0212, Test Loss:283.7345
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.54it/s]
Epoch[42/60], Train Loss:286.0138, Test Loss:283.7257
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.24it/s]
Epoch[43/60], Train Loss:285.9946, Test Loss:283.7149
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.74it/s]
Epoch[44/60], Train Loss:285.9960, Test Loss:283.7345
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.83it/s]
Epoch[45/60], Train Loss:285.9909, Test Loss:283.7085
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.90it/s]
Epoch[46/60], Train Loss:285.9721, Test Loss:283.7102
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.59it/s]
Epoch[47/60], Train Loss:285.9495, Test Loss:283.6742
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.74it/s]
Epoch[48/60], Train Loss:285.9635, Test Loss:283.6983
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.90it/s]
Epoch[49/60], Train Loss:285.9589, Test Loss:283.6475
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.39it/s]
Epoch[50/60], Train Loss:285.9496, Test Loss:283.6611
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.96it/s]
Epoch[51/60], Train Loss:285.9292, Test Loss:283.6440
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.52it/s]
Epoch[52/60], Train Loss:285.9167, Test Loss:283.6774
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.21it/s]
Epoch[53/60], Train Loss:285.9288, Test Loss:283.6477
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.19it/s]
Epoch[54/60], Train Loss:285.9109, Test Loss:283.6151
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.77it/s]
Epoch[55/60], Train Loss:285.8992, Test Loss:283.6203
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.98it/s]
Epoch[56/60], Train Loss:285.9005, Test Loss:283.6070
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 31.68it/s]
Epoch[57/60], Train Loss:285.8863, Test Loss:283.5994
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.60it/s]
Epoch[58/60], Train Loss:285.8761, Test Loss:283.5721
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.44it/s]
Epoch[59/60], Train Loss:285.8843, Test Loss:283.6120
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.60it/s]
Epoch[60/60], Train Loss:285.8553, Test Loss:283.5957 Performing Experiment: epoch_num=80, lr=0.001
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.74it/s]
Epoch[1/80], Train Loss:277.8682, Test Loss:236.7101
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.45it/s]
Epoch[2/80], Train Loss:196.6051, Test Loss:169.2227
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.46it/s]
Epoch[3/80], Train Loss:157.2191, Test Loss:143.4743
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.71it/s]
Epoch[4/80], Train Loss:142.2445, Test Loss:133.1717
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.32it/s]
Epoch[5/80], Train Loss:134.1444, Test Loss:128.4650
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 31.10it/s]
Epoch[6/80], Train Loss:129.2781, Test Loss:122.0376
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.32it/s]
Epoch[7/80], Train Loss:124.3405, Test Loss:116.5463
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.91it/s]
Epoch[8/80], Train Loss:121.8109, Test Loss:115.4687
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 31.61it/s]
Epoch[9/80], Train Loss:119.4961, Test Loss:114.3047
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.50it/s]
Epoch[10/80], Train Loss:117.0378, Test Loss:110.4671
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.17it/s]
Epoch[11/80], Train Loss:116.6359, Test Loss:111.8367
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.71it/s]
Epoch[12/80], Train Loss:115.2172, Test Loss:109.9100
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 30.07it/s]
Epoch[13/80], Train Loss:114.0932, Test Loss:106.4504
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.46it/s]
Epoch[14/80], Train Loss:112.2931, Test Loss:107.8855
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.93it/s]
Epoch[15/80], Train Loss:112.9931, Test Loss:106.9307
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.49it/s]
Epoch[16/80], Train Loss:111.0355, Test Loss:106.4232
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 30.06it/s]
Epoch[17/80], Train Loss:110.0961, Test Loss:107.5618
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.68it/s]
Epoch[18/80], Train Loss:110.2339, Test Loss:104.0645
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.95it/s]
Epoch[19/80], Train Loss:108.9132, Test Loss:102.9092
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.27it/s]
Epoch[20/80], Train Loss:107.4715, Test Loss:103.8177
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.97it/s]
Epoch[21/80], Train Loss:107.3280, Test Loss:104.2476
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.09it/s]
Epoch[22/80], Train Loss:107.6686, Test Loss:99.6730
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.86it/s]
Epoch[23/80], Train Loss:106.4326, Test Loss:100.5515
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.30it/s]
Epoch[24/80], Train Loss:105.7970, Test Loss:100.7688
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 30.00it/s]
Epoch[25/80], Train Loss:104.4891, Test Loss:103.1941
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.39it/s]
Epoch[26/80], Train Loss:105.4451, Test Loss:102.2304
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.09it/s]
Epoch[27/80], Train Loss:105.4102, Test Loss:99.8455
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.77it/s]
Epoch[28/80], Train Loss:104.1459, Test Loss:98.8022
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.60it/s]
Epoch[29/80], Train Loss:104.1561, Test Loss:97.7776
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.86it/s]
Epoch[30/80], Train Loss:103.8717, Test Loss:101.4086
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.76it/s]
Epoch[31/80], Train Loss:103.3716, Test Loss:98.4455
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.40it/s]
Epoch[32/80], Train Loss:103.4206, Test Loss:103.3282
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.64it/s]
Epoch[33/80], Train Loss:102.8985, Test Loss:102.1639
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.83it/s]
Epoch[34/80], Train Loss:102.1389, Test Loss:97.1313
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 28.78it/s]
Epoch[35/80], Train Loss:103.8156, Test Loss:99.3986
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.54it/s]
Epoch[36/80], Train Loss:102.7982, Test Loss:95.9201
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 28.86it/s]
Epoch[37/80], Train Loss:101.6037, Test Loss:97.3524
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.49it/s]
Epoch[38/80], Train Loss:102.1098, Test Loss:97.1823
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.18it/s]
Epoch[39/80], Train Loss:101.4764, Test Loss:95.7237
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 31.12it/s]
Epoch[40/80], Train Loss:101.8494, Test Loss:98.4287
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 32.14it/s]
Epoch[41/80], Train Loss:101.5340, Test Loss:98.4394
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 32.21it/s]
Epoch[42/80], Train Loss:100.4253, Test Loss:97.0596
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 31.21it/s]
Epoch[43/80], Train Loss:100.8058, Test Loss:93.9668
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.98it/s]
Epoch[44/80], Train Loss:100.9073, Test Loss:95.8810
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.26it/s]
Epoch[45/80], Train Loss:100.8711, Test Loss:95.6729
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.56it/s]
Epoch[46/80], Train Loss:101.3571, Test Loss:95.1734
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.80it/s]
Epoch[47/80], Train Loss:100.5825, Test Loss:94.7260
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.23it/s]
Epoch[48/80], Train Loss:100.0984, Test Loss:95.9252
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.68it/s]
Epoch[49/80], Train Loss:100.3963, Test Loss:93.7257
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.31it/s]
Epoch[50/80], Train Loss:99.4033, Test Loss:95.0538
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.70it/s]
Epoch[51/80], Train Loss:99.9478, Test Loss:95.2535
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 30.06it/s]
Epoch[52/80], Train Loss:99.6652, Test Loss:94.2544
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 28.85it/s]
Epoch[53/80], Train Loss:99.3725, Test Loss:93.1739
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 28.99it/s]
Epoch[54/80], Train Loss:99.0766, Test Loss:93.6647
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.55it/s]
Epoch[55/80], Train Loss:100.1088, Test Loss:96.9604
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.22it/s]
Epoch[56/80], Train Loss:98.9032, Test Loss:93.4999
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.20it/s]
Epoch[57/80], Train Loss:98.6609, Test Loss:93.1186
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.35it/s]
Epoch[58/80], Train Loss:98.4083, Test Loss:95.0938
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.68it/s]
Epoch[59/80], Train Loss:99.0134, Test Loss:92.6177
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.44it/s]
Epoch[60/80], Train Loss:98.0066, Test Loss:93.4637
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 31.90it/s]
Epoch[61/80], Train Loss:98.2020, Test Loss:94.8903
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 31.75it/s]
Epoch[62/80], Train Loss:98.5156, Test Loss:94.2197
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.48it/s]
Epoch[63/80], Train Loss:98.9299, Test Loss:93.6976
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 30.12it/s]
Epoch[64/80], Train Loss:98.1879, Test Loss:91.1141
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.74it/s]
Epoch[65/80], Train Loss:97.8354, Test Loss:91.7930
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.17it/s]
Epoch[66/80], Train Loss:97.3775, Test Loss:94.0143
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.36it/s]
Epoch[67/80], Train Loss:98.3877, Test Loss:92.7969
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 28.82it/s]
Epoch[68/80], Train Loss:98.6167, Test Loss:91.1325
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.42it/s]
Epoch[69/80], Train Loss:97.8219, Test Loss:93.3056
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.49it/s]
Epoch[70/80], Train Loss:98.0489, Test Loss:93.6346
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 30.12it/s]
Epoch[71/80], Train Loss:97.5119, Test Loss:94.3369
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.83it/s]
Epoch[72/80], Train Loss:97.8782, Test Loss:92.3717
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.54it/s]
Epoch[73/80], Train Loss:96.9219, Test Loss:94.3873
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.59it/s]
Epoch[74/80], Train Loss:97.5397, Test Loss:92.7478
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.21it/s]
Epoch[75/80], Train Loss:96.8399, Test Loss:91.4738
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.28it/s]
Epoch[76/80], Train Loss:97.6553, Test Loss:95.1173
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 30.02it/s]
Epoch[77/80], Train Loss:97.1336, Test Loss:93.4124
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.79it/s]
Epoch[78/80], Train Loss:96.4034, Test Loss:92.8789
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.93it/s]
Epoch[79/80], Train Loss:97.0415, Test Loss:93.1451
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.92it/s]
Epoch[80/80], Train Loss:96.9570, Test Loss:91.7277 Performing Experiment: epoch_num=80, lr=0.0001
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 30.10it/s]
Epoch[1/80], Train Loss:286.5248, Test Loss:283.8699
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.27it/s]
Epoch[2/80], Train Loss:276.4257, Test Loss:258.8106
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.02it/s]
Epoch[3/80], Train Loss:253.4193, Test Loss:246.2500
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.19it/s]
Epoch[4/80], Train Loss:241.7924, Test Loss:233.2400
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.54it/s]
Epoch[5/80], Train Loss:231.1112, Test Loss:224.8301
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.40it/s]
Epoch[6/80], Train Loss:221.1405, Test Loss:215.3762
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.39it/s]
Epoch[7/80], Train Loss:214.0830, Test Loss:208.8835
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.36it/s]
Epoch[8/80], Train Loss:208.1578, Test Loss:204.9469
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 28.96it/s]
Epoch[9/80], Train Loss:201.7589, Test Loss:198.0608
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.19it/s]
Epoch[10/80], Train Loss:196.9215, Test Loss:188.2789
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.53it/s]
Epoch[11/80], Train Loss:191.3552, Test Loss:186.6676
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.56it/s]
Epoch[12/80], Train Loss:186.0894, Test Loss:181.6012
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.74it/s]
Epoch[13/80], Train Loss:182.1298, Test Loss:177.1391
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 31.41it/s]
Epoch[14/80], Train Loss:177.6877, Test Loss:172.0623
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.46it/s]
Epoch[15/80], Train Loss:174.0476, Test Loss:168.8421
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 31.60it/s]
Epoch[16/80], Train Loss:170.0258, Test Loss:167.0885
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.68it/s]
Epoch[17/80], Train Loss:167.0230, Test Loss:160.9976
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 31.79it/s]
Epoch[18/80], Train Loss:165.6622, Test Loss:159.9205
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.44it/s]
Epoch[19/80], Train Loss:162.1768, Test Loss:157.7698
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.22it/s]
Epoch[20/80], Train Loss:160.7828, Test Loss:157.1086
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.26it/s]
Epoch[21/80], Train Loss:158.7494, Test Loss:155.4952
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.84it/s]
Epoch[22/80], Train Loss:156.1744, Test Loss:152.5213
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 30.05it/s]
Epoch[23/80], Train Loss:154.5031, Test Loss:150.1715
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.80it/s]
Epoch[24/80], Train Loss:154.6313, Test Loss:151.1394
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.22it/s]
Epoch[25/80], Train Loss:151.5686, Test Loss:147.5529
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.66it/s]
Epoch[26/80], Train Loss:150.2255, Test Loss:144.2880
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.18it/s]
Epoch[27/80], Train Loss:147.8683, Test Loss:143.2922
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.69it/s]
Epoch[28/80], Train Loss:147.5021, Test Loss:146.0288
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.36it/s]
Epoch[29/80], Train Loss:145.3462, Test Loss:142.1881
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.78it/s]
Epoch[30/80], Train Loss:144.1065, Test Loss:136.9128
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.42it/s]
Epoch[31/80], Train Loss:143.6170, Test Loss:137.4138
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.59it/s]
Epoch[32/80], Train Loss:141.1400, Test Loss:136.2028
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.04it/s]
Epoch[33/80], Train Loss:140.3058, Test Loss:136.3271
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.31it/s]
Epoch[34/80], Train Loss:139.6885, Test Loss:141.1067
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.47it/s]
Epoch[35/80], Train Loss:138.5257, Test Loss:137.8946
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.32it/s]
Epoch[36/80], Train Loss:137.6868, Test Loss:133.4688
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.29it/s]
Epoch[37/80], Train Loss:136.4505, Test Loss:131.9210
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 31.47it/s]
Epoch[38/80], Train Loss:136.5660, Test Loss:129.8104
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 31.32it/s]
Epoch[39/80], Train Loss:135.0922, Test Loss:132.5575
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 30.00it/s]
Epoch[40/80], Train Loss:135.0095, Test Loss:131.4332
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.78it/s]
Epoch[41/80], Train Loss:133.8140, Test Loss:130.0824
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.11it/s]
Epoch[42/80], Train Loss:132.4937, Test Loss:128.9972
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 30.01it/s]
Epoch[43/80], Train Loss:132.4934, Test Loss:128.2384
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.95it/s]
Epoch[44/80], Train Loss:130.5592, Test Loss:127.5000
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.60it/s]
Epoch[45/80], Train Loss:130.4742, Test Loss:124.4509
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.84it/s]
Epoch[46/80], Train Loss:129.3207, Test Loss:124.6289
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.87it/s]
Epoch[47/80], Train Loss:128.6498, Test Loss:124.9471
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.81it/s]
Epoch[48/80], Train Loss:129.0289, Test Loss:126.2955
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.81it/s]
Epoch[49/80], Train Loss:127.6521, Test Loss:123.9638
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.96it/s]
Epoch[50/80], Train Loss:127.4623, Test Loss:122.8838
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.16it/s]
Epoch[51/80], Train Loss:126.4869, Test Loss:122.5957
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.49it/s]
Epoch[52/80], Train Loss:125.6947, Test Loss:121.4731
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.39it/s]
Epoch[53/80], Train Loss:124.7252, Test Loss:122.3023
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.94it/s]
Epoch[54/80], Train Loss:124.7892, Test Loss:118.0138
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.89it/s]
Epoch[55/80], Train Loss:124.7003, Test Loss:120.6715
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.43it/s]
Epoch[56/80], Train Loss:123.0114, Test Loss:120.9568
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 30.07it/s]
Epoch[57/80], Train Loss:123.0097, Test Loss:119.5302
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.94it/s]
Epoch[58/80], Train Loss:122.0248, Test Loss:117.2344
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.66it/s]
Epoch[59/80], Train Loss:122.1582, Test Loss:117.8473
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 32.05it/s]
Epoch[60/80], Train Loss:121.2939, Test Loss:117.7635
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.82it/s]
Epoch[61/80], Train Loss:121.4976, Test Loss:118.0773
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.69it/s]
Epoch[62/80], Train Loss:121.5877, Test Loss:117.5923
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.02it/s]
Epoch[63/80], Train Loss:120.6803, Test Loss:118.8535
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.38it/s]
Epoch[64/80], Train Loss:120.6186, Test Loss:115.9259
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.21it/s]
Epoch[65/80], Train Loss:118.5567, Test Loss:114.9710
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.23it/s]
Epoch[66/80], Train Loss:119.3428, Test Loss:114.2387
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 28.95it/s]
Epoch[67/80], Train Loss:119.8756, Test Loss:114.7896
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.64it/s]
Epoch[68/80], Train Loss:118.3419, Test Loss:114.9009
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 31.23it/s]
Epoch[69/80], Train Loss:117.9786, Test Loss:113.0454
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.47it/s]
Epoch[70/80], Train Loss:118.5545, Test Loss:112.3153
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.28it/s]
Epoch[71/80], Train Loss:116.4027, Test Loss:116.3193
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 31.36it/s]
Epoch[72/80], Train Loss:116.4801, Test Loss:113.3558
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 30.09it/s]
Epoch[73/80], Train Loss:116.1813, Test Loss:110.7471
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.52it/s]
Epoch[74/80], Train Loss:116.7941, Test Loss:112.8114
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.66it/s]
Epoch[75/80], Train Loss:115.4452, Test Loss:111.3086
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.40it/s]
Epoch[76/80], Train Loss:115.6674, Test Loss:111.2418
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.86it/s]
Epoch[77/80], Train Loss:115.8941, Test Loss:113.3292
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.61it/s]
Epoch[78/80], Train Loss:115.1228, Test Loss:110.6519
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.58it/s]
Epoch[79/80], Train Loss:114.1405, Test Loss:110.8332
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.60it/s]
Epoch[80/80], Train Loss:115.4959, Test Loss:111.4980 Performing Experiment: epoch_num=80, lr=1e-05
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.52it/s]
Epoch[1/80], Train Loss:289.4169, Test Loss:284.5927
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.78it/s]
Epoch[2/80], Train Loss:286.1422, Test Loss:283.9332
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 30.10it/s]
Epoch[3/80], Train Loss:285.9551, Test Loss:283.7711
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.36it/s]
Epoch[4/80], Train Loss:285.8977, Test Loss:283.7739
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.63it/s]
Epoch[5/80], Train Loss:285.8352, Test Loss:283.6311
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.17it/s]
Epoch[6/80], Train Loss:285.7528, Test Loss:283.5800
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.94it/s]
Epoch[7/80], Train Loss:285.6404, Test Loss:283.4118
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.24it/s]
Epoch[8/80], Train Loss:285.4606, Test Loss:283.0912
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.23it/s]
Epoch[9/80], Train Loss:285.1668, Test Loss:282.6870
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.45it/s]
Epoch[10/80], Train Loss:284.6352, Test Loss:282.0103
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.95it/s]
Epoch[11/80], Train Loss:283.6935, Test Loss:280.3618
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 31.01it/s]
Epoch[12/80], Train Loss:281.7972, Test Loss:277.7204
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.42it/s]
Epoch[13/80], Train Loss:279.0990, Test Loss:274.3379
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.70it/s]
Epoch[14/80], Train Loss:275.5882, Test Loss:270.5581
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.33it/s]
Epoch[15/80], Train Loss:271.9188, Test Loss:267.5205
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.37it/s]
Epoch[16/80], Train Loss:269.0143, Test Loss:264.4861
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 30.07it/s]
Epoch[17/80], Train Loss:267.1500, Test Loss:263.3589
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.98it/s]
Epoch[18/80], Train Loss:265.2659, Test Loss:261.9778
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.16it/s]
Epoch[19/80], Train Loss:263.5672, Test Loss:260.5488
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.56it/s]
Epoch[20/80], Train Loss:262.4637, Test Loss:260.3398
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.64it/s]
Epoch[21/80], Train Loss:260.7400, Test Loss:256.9068
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.31it/s]
Epoch[22/80], Train Loss:259.9744, Test Loss:256.8272
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.82it/s]
Epoch[23/80], Train Loss:258.5120, Test Loss:255.2184
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.25it/s]
Epoch[24/80], Train Loss:257.4383, Test Loss:255.6592
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.97it/s]
Epoch[25/80], Train Loss:256.4215, Test Loss:254.0812
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.72it/s]
Epoch[26/80], Train Loss:255.3999, Test Loss:252.6325
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.80it/s]
Epoch[27/80], Train Loss:254.3907, Test Loss:252.8279
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.74it/s]
Epoch[28/80], Train Loss:253.4980, Test Loss:252.2093
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.21it/s]
Epoch[29/80], Train Loss:253.3335, Test Loss:252.7874
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.22it/s]
Epoch[30/80], Train Loss:252.0428, Test Loss:250.5222
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.47it/s]
Epoch[31/80], Train Loss:251.3087, Test Loss:249.3173
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 30.05it/s]
Epoch[32/80], Train Loss:250.4105, Test Loss:248.0075
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.82it/s]
Epoch[33/80], Train Loss:249.5638, Test Loss:246.6990
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.43it/s]
Epoch[34/80], Train Loss:249.2542, Test Loss:246.5553
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 31.15it/s]
Epoch[35/80], Train Loss:248.7271, Test Loss:245.9655
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.83it/s]
Epoch[36/80], Train Loss:248.2950, Test Loss:245.1653
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 32.01it/s]
Epoch[37/80], Train Loss:247.9837, Test Loss:246.3421
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.31it/s]
Epoch[38/80], Train Loss:247.2203, Test Loss:245.2242
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 30.04it/s]
Epoch[39/80], Train Loss:246.2953, Test Loss:245.2260
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.76it/s]
Epoch[40/80], Train Loss:246.6695, Test Loss:245.6277
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.46it/s]
Epoch[41/80], Train Loss:245.6002, Test Loss:243.5512
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.71it/s]
Epoch[42/80], Train Loss:244.8543, Test Loss:243.8257
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.90it/s]
Epoch[43/80], Train Loss:244.6210, Test Loss:243.3569
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.45it/s]
Epoch[44/80], Train Loss:244.4885, Test Loss:241.1708
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.62it/s]
Epoch[45/80], Train Loss:243.6444, Test Loss:241.9610
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.48it/s]
Epoch[46/80], Train Loss:243.0775, Test Loss:241.2313
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.31it/s]
Epoch[47/80], Train Loss:243.6358, Test Loss:241.6516
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.43it/s]
Epoch[48/80], Train Loss:242.8071, Test Loss:240.9293
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.06it/s]
Epoch[49/80], Train Loss:241.9013, Test Loss:240.0861
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 30.05it/s]
Epoch[50/80], Train Loss:241.7872, Test Loss:239.7731
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.93it/s]
Epoch[51/80], Train Loss:241.3614, Test Loss:239.9437
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.53it/s]
Epoch[52/80], Train Loss:240.5796, Test Loss:240.2064
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.13it/s]
Epoch[53/80], Train Loss:240.4933, Test Loss:236.9888
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.92it/s]
Epoch[54/80], Train Loss:239.7459, Test Loss:239.3999
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.34it/s]
Epoch[55/80], Train Loss:239.0754, Test Loss:238.6552
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.81it/s]
Epoch[56/80], Train Loss:239.1940, Test Loss:236.3566
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.97it/s]
Epoch[57/80], Train Loss:237.9807, Test Loss:236.7118
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.56it/s]
Epoch[58/80], Train Loss:238.0892, Test Loss:235.9307
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.41it/s]
Epoch[59/80], Train Loss:237.8306, Test Loss:234.7428
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.62it/s]
Epoch[60/80], Train Loss:236.6307, Test Loss:234.6518
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 30.14it/s]
Epoch[61/80], Train Loss:236.8269, Test Loss:234.5908
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.54it/s]
Epoch[62/80], Train Loss:236.0829, Test Loss:235.2082
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.31it/s]
Epoch[63/80], Train Loss:236.0415, Test Loss:233.7514
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 30.04it/s]
Epoch[64/80], Train Loss:234.9559, Test Loss:234.1008
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.23it/s]
Epoch[65/80], Train Loss:235.5851, Test Loss:233.2918
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 32.95it/s]
Epoch[66/80], Train Loss:234.1379, Test Loss:232.6573
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.90it/s]
Epoch[67/80], Train Loss:233.7615, Test Loss:231.2782
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.65it/s]
Epoch[68/80], Train Loss:233.8111, Test Loss:231.5673
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.93it/s]
Epoch[69/80], Train Loss:233.0673, Test Loss:230.7159
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.78it/s]
Epoch[70/80], Train Loss:231.7560, Test Loss:230.8094
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.92it/s]
Epoch[71/80], Train Loss:232.6441, Test Loss:232.6355
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.87it/s]
Epoch[72/80], Train Loss:232.1562, Test Loss:230.2153
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 30.05it/s]
Epoch[73/80], Train Loss:231.3666, Test Loss:229.6396
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 31.69it/s]
Epoch[74/80], Train Loss:230.7140, Test Loss:229.1937
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.80it/s]
Epoch[75/80], Train Loss:230.6282, Test Loss:228.8398
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.51it/s]
Epoch[76/80], Train Loss:230.3494, Test Loss:229.4988
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 31.01it/s]
Epoch[77/80], Train Loss:229.5286, Test Loss:228.2184
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 30.11it/s]
Epoch[78/80], Train Loss:229.3207, Test Loss:227.0861
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.44it/s]
Epoch[79/80], Train Loss:228.1831, Test Loss:227.2397
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.29it/s]
Epoch[80/80], Train Loss:227.3896, Test Loss:226.8954 Performing Experiment: epoch_num=80, lr=1e-06
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.19it/s]
Epoch[1/80], Train Loss:294.3027, Test Loss:293.4793
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 30.08it/s]
Epoch[2/80], Train Loss:293.7893, Test Loss:292.8058
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.58it/s]
Epoch[3/80], Train Loss:292.9761, Test Loss:291.5275
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.76it/s]
Epoch[4/80], Train Loss:291.2868, Test Loss:288.7639
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.56it/s]
Epoch[5/80], Train Loss:288.5866, Test Loss:285.6467
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.66it/s]
Epoch[6/80], Train Loss:286.8913, Test Loss:284.6310
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 28.95it/s]
Epoch[7/80], Train Loss:286.4880, Test Loss:284.3291
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.07it/s]
Epoch[8/80], Train Loss:286.3317, Test Loss:284.1932
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.31it/s]
Epoch[9/80], Train Loss:286.2353, Test Loss:284.0820
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.67it/s]
Epoch[10/80], Train Loss:286.1882, Test Loss:284.0037
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.97it/s]
Epoch[11/80], Train Loss:286.1407, Test Loss:283.9597
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.44it/s]
Epoch[12/80], Train Loss:286.0975, Test Loss:283.9333
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.85it/s]
Epoch[13/80], Train Loss:286.0931, Test Loss:283.9076
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.50it/s]
Epoch[14/80], Train Loss:286.0796, Test Loss:283.8545
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.16it/s]
Epoch[15/80], Train Loss:286.0682, Test Loss:283.8677
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.03it/s]
Epoch[16/80], Train Loss:286.0422, Test Loss:283.7851
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.89it/s]
Epoch[17/80], Train Loss:286.0224, Test Loss:283.8510
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.65it/s]
Epoch[18/80], Train Loss:286.0206, Test Loss:283.8108
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.90it/s]
Epoch[19/80], Train Loss:286.0032, Test Loss:283.7988
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.07it/s]
Epoch[20/80], Train Loss:286.0044, Test Loss:283.7867
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.24it/s]
Epoch[21/80], Train Loss:285.9674, Test Loss:283.7700
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.67it/s]
Epoch[22/80], Train Loss:285.9847, Test Loss:283.7765
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.22it/s]
Epoch[23/80], Train Loss:285.9607, Test Loss:283.7589
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 28.97it/s]
Epoch[24/80], Train Loss:285.9593, Test Loss:283.7692
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.39it/s]
Epoch[25/80], Train Loss:285.9505, Test Loss:283.7634
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.95it/s]
Epoch[26/80], Train Loss:285.9380, Test Loss:283.7042
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 28.82it/s]
Epoch[27/80], Train Loss:285.9380, Test Loss:283.7101
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.84it/s]
Epoch[28/80], Train Loss:285.9237, Test Loss:283.6948
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.05it/s]
Epoch[29/80], Train Loss:285.9377, Test Loss:283.6984
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.10it/s]
Epoch[30/80], Train Loss:285.9188, Test Loss:283.7021
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.82it/s]
Epoch[31/80], Train Loss:285.9144, Test Loss:283.6396
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.01it/s]
Epoch[32/80], Train Loss:285.8999, Test Loss:283.6743
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.08it/s]
Epoch[33/80], Train Loss:285.8915, Test Loss:283.6699
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 32.09it/s]
Epoch[34/80], Train Loss:285.8838, Test Loss:283.6386
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.33it/s]
Epoch[35/80], Train Loss:285.8753, Test Loss:283.6374
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.28it/s]
Epoch[36/80], Train Loss:285.8802, Test Loss:283.6226
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.28it/s]
Epoch[37/80], Train Loss:285.8772, Test Loss:283.6316
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.53it/s]
Epoch[38/80], Train Loss:285.8671, Test Loss:283.6063
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.62it/s]
Epoch[39/80], Train Loss:285.8503, Test Loss:283.6002
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.78it/s]
Epoch[40/80], Train Loss:285.8429, Test Loss:283.5916
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.09it/s]
Epoch[41/80], Train Loss:285.8232, Test Loss:283.5900
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.52it/s]
Epoch[42/80], Train Loss:285.8269, Test Loss:283.5791
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.20it/s]
Epoch[43/80], Train Loss:285.8426, Test Loss:283.5694
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.79it/s]
Epoch[44/80], Train Loss:285.8083, Test Loss:283.5612
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.79it/s]
Epoch[45/80], Train Loss:285.8115, Test Loss:283.5429
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.53it/s]
Epoch[46/80], Train Loss:285.8121, Test Loss:283.5361
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 31.22it/s]
Epoch[47/80], Train Loss:285.7942, Test Loss:283.5227
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 31.31it/s]
Epoch[48/80], Train Loss:285.7858, Test Loss:283.5416
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 31.22it/s]
Epoch[49/80], Train Loss:285.7705, Test Loss:283.5121
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 31.30it/s]
Epoch[50/80], Train Loss:285.7492, Test Loss:283.4941
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 30.33it/s]
Epoch[51/80], Train Loss:285.7612, Test Loss:283.5237
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.76it/s]
Epoch[52/80], Train Loss:285.7519, Test Loss:283.4657
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.44it/s]
Epoch[53/80], Train Loss:285.7493, Test Loss:283.4704
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.62it/s]
Epoch[54/80], Train Loss:285.7361, Test Loss:283.4572
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.49it/s]
Epoch[55/80], Train Loss:285.7223, Test Loss:283.4348
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.26it/s]
Epoch[56/80], Train Loss:285.7170, Test Loss:283.4321
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.81it/s]
Epoch[57/80], Train Loss:285.7074, Test Loss:283.4295
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 30.04it/s]
Epoch[58/80], Train Loss:285.6963, Test Loss:283.4156
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.71it/s]
Epoch[59/80], Train Loss:285.6869, Test Loss:283.3820
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.49it/s]
Epoch[60/80], Train Loss:285.6798, Test Loss:283.3891
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.40it/s]
Epoch[61/80], Train Loss:285.6790, Test Loss:283.3691
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.16it/s]
Epoch[62/80], Train Loss:285.6587, Test Loss:283.3540
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.86it/s]
Epoch[63/80], Train Loss:285.6648, Test Loss:283.3500
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.34it/s]
Epoch[64/80], Train Loss:285.6600, Test Loss:283.3274
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 31.63it/s]
Epoch[65/80], Train Loss:285.6267, Test Loss:283.3123
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 32.49it/s]
Epoch[66/80], Train Loss:285.6134, Test Loss:283.3244
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.88it/s]
Epoch[67/80], Train Loss:285.6022, Test Loss:283.2633
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 31.37it/s]
Epoch[68/80], Train Loss:285.5755, Test Loss:283.2789
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 31.84it/s]
Epoch[69/80], Train Loss:285.5837, Test Loss:283.2257
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 31.85it/s]
Epoch[70/80], Train Loss:285.5590, Test Loss:283.2661
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 32.40it/s]
Epoch[71/80], Train Loss:285.5510, Test Loss:283.2151
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.06it/s]
Epoch[72/80], Train Loss:285.5442, Test Loss:283.1890
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.25it/s]
Epoch[73/80], Train Loss:285.5174, Test Loss:283.1739
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 32.41it/s]
Epoch[74/80], Train Loss:285.5023, Test Loss:283.1745
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 33.53it/s]
Epoch[75/80], Train Loss:285.4807, Test Loss:283.1334
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:18<00:00, 31.74it/s]
Epoch[76/80], Train Loss:285.4751, Test Loss:283.0994
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:17<00:00, 31.87it/s]
Epoch[77/80], Train Loss:285.4490, Test Loss:283.1165
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.57it/s]
Epoch[78/80], Train Loss:285.4413, Test Loss:283.0676
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.30it/s]
Epoch[79/80], Train Loss:285.4041, Test Loss:283.0302
100%|████████████████████████████████████████████████████████████████████████████████| 573/573 [00:19<00:00, 29.49it/s]
Epoch[80/80], Train Loss:285.4001, Test Loss:282.9871
3.2.3 Load Experiments¶
Load the saved experiments, and plot the epoch-loss curve to inspect training performance.
# This checkpoint is a locally produced, trusted file holding a list of plain
# dicts (hyperparameters + model state_dicts), so pickle-based loading is
# intentional. Passing weights_only=False explicitly silences the FutureWarning
# about the upcoming default flip (weights_only=True would reject non-tensor
# objects such as these experiment dicts).
loaded_experiments = torch.load("./models/experiments_17296227919579012.pth",
                                weights_only=False)
D:\Temps\temp\ipykernel_68752\2506138615.py:1: FutureWarning: You are using `torch.load` with `weights_only=False` (the current default value), which uses the default pickle module implicitly. It is possible to construct malicious pickle data which will execute arbitrary code during unpickling (See https://github.com/pytorch/pytorch/blob/main/SECURITY.md#untrusted-models for more details). In a future release, the default value for `weights_only` will be flipped to `True`. This limits the functions that could be executed during unpickling. Arbitrary objects will no longer be allowed to be loaded via this mode unless they are explicitly allowlisted by the user via `torch.serialization.add_safe_globals`. We recommend you start setting `weights_only=True` for any use case where you don't have full control of the loaded file. Please open an issue on GitHub for any issues related to this experimental feature.
loaded_experiments = torch.load("./models/experiments_17296227919579012.pth")
# Plot the epoch-vs-loss curves for every loaded experiment
# (plot_el is presumably the helper defined earlier in this notebook — confirm).
plot_el(loaded_experiments)
3.2.4 Apply Model, Get Result¶
def get_experiment_results(loaded_experiments, extra_loader):
    """Run each saved experiment's model on a loader and collect its predictions.

    Args:
        loaded_experiments: list of dicts as saved during training; each must
            contain 'model_state_dict', 'num_epoch', and 'lr'.
        extra_loader: DataLoader yielding the evaluation batches passed to
            get_predictions.

    Returns:
        List of dicts, one per experiment, with keys 'epoch_num', 'lr',
        'true_labels', 'pred_labels', and 'pred_scores'.
    """
    experiment_results = []
    for i, exp in enumerate(loaded_experiments):
        pred_scores, true_labels_cpu, pred_labels_cpu = get_predictions(exp["model_state_dict"], extra_loader)
        print(f"Experiment {i+1}, num_epoch={exp['num_epoch']}, lr={exp['lr']}")
        # Preview the results.  print(*seq, end=" ") replaces the original
        # list comprehensions that were run purely for their print side
        # effects and produces identical output.
        print("First 100 true labels:")
        print(*true_labels_cpu[:100], end=" ")
        print("...\n")
        print("First 100 true predictions:")
        print(*pred_labels_cpu[:100], end=" ")
        print("...\n")
        print("First 5 prediction Probabilities:")
        print(*pred_scores[:5], end=" ")
        print("...")
        experiment_results.append({
            "epoch_num": exp['num_epoch'],
            "lr": exp['lr'],
            "true_labels": true_labels_cpu,
            "pred_labels": pred_labels_cpu,
            "pred_scores": pred_scores
        })
        # Drop per-experiment references and free cached GPU memory before
        # evaluating the next model.
        del pred_scores, true_labels_cpu, pred_labels_cpu
        torch.cuda.empty_cache()
    return experiment_results
# Evaluate every saved experiment on the extra loader and keep the predictions.
experiment_results = get_experiment_results(loaded_experiments, extra_loader)
100%|████████████████████████████████████████████████████████████████████████████████| 469/469 [00:08<00:00, 57.57it/s]
Experiment 1, num_epoch=20, lr=0.001 First 100 true labels: 4 7 8 7 1 1 7 4 3 0 2 8 8 3 1 1 7 0 8 1 5 6 4 4 4 6 3 4 4 3 0 1 7 6 0 1 1 0 5 7 5 1 8 5 5 2 9 6 1 5 2 3 5 3 6 9 2 3 4 1 7 7 3 1 2 2 0 1 1 3 1 5 1 1 9 9 4 8 0 5 1 3 8 2 9 5 6 0 7 8 3 0 6 4 0 3 1 1 0 0 ... First 100 true predictions: 2 1 8 7 1 7 7 7 3 0 2 8 8 3 1 1 7 1 3 7 5 6 4 4 4 6 6 8 4 3 2 1 7 6 5 1 1 0 5 7 5 1 8 5 5 2 9 6 1 5 2 3 5 2 6 5 2 3 4 7 7 7 3 1 2 2 0 1 1 3 1 5 2 1 8 2 2 8 0 5 1 3 8 2 0 5 6 0 7 8 3 0 9 4 0 3 7 1 6 0 ... First 5 prediction Probabilities: [0.017714930698275566, 0.11971186846494675, 0.24600271880626678, 0.1634218990802765, 0.10604322701692581, 0.09993854910135269, 0.07444882392883301, 0.09129485487937927, 0.03048068843781948, 0.05094243958592415] [0.03476187214255333, 0.4355396628379822, 0.013863475993275642, 0.009521962143480778, 0.05131062492728233, 0.003921713214367628, 0.01304430142045021, 0.4110223054885864, 0.010886728763580322, 0.01612740010023117] [2.926520484280992e-11, 1.9651599791892238e-10, 1.1518684672751078e-08, 1.6825642035200872e-07, 1.5297356437127263e-10, 3.0309657561566894e-10, 7.056090112200764e-08, 1.1634063157295316e-10, 0.9999996423721313, 3.558699646077912e-08] [2.3501052215380014e-09, 1.2330511935942923e-06, 3.8137210367494845e-07, 1.2438673593351268e-06, 1.8493298057364882e-06, 2.4140272003592145e-09, 1.900778645591572e-09, 0.9999953508377075, 6.7932157676864335e-09, 4.1174885723194166e-09] [0.06164313107728958, 0.7637706995010376, 0.0017260065069422126, 0.012882382608950138, 0.04687739908695221, 0.003659889567643404, 0.006159364711493254, 0.09059558063745499, 0.009119569323956966, 0.0035659889690577984] ...
100%|████████████████████████████████████████████████████████████████████████████████| 469/469 [00:07<00:00, 63.91it/s]
Experiment 2, num_epoch=20, lr=0.0001 First 100 true labels: 4 7 8 7 1 1 7 4 3 0 2 8 8 3 1 1 7 0 8 1 5 6 4 4 4 6 3 4 4 3 0 1 7 6 0 1 1 0 5 7 5 1 8 5 5 2 9 6 1 5 2 3 5 3 6 9 2 3 4 1 7 7 3 1 2 2 0 1 1 3 1 5 1 1 9 9 4 8 0 5 1 3 8 2 9 5 6 0 7 8 3 0 6 4 0 3 1 1 0 0 ... First 100 true predictions: 1 2 5 7 1 1 1 1 1 0 2 8 1 3 1 3 7 1 5 7 7 3 4 4 4 4 3 1 4 6 0 1 1 6 0 1 1 0 5 1 5 1 8 5 5 2 9 6 3 5 9 3 5 3 3 9 4 3 4 1 1 7 3 7 2 2 5 1 1 3 1 6 1 1 9 1 2 2 1 3 1 3 8 2 0 6 1 0 7 8 2 1 6 4 0 3 1 1 0 0 ... First 5 prediction Probabilities: [0.04172287881374359, 0.3212367296218872, 0.11267822980880737, 0.08403488993644714, 0.1291959136724472, 0.05936244875192642, 0.05597791448235512, 0.11382696032524109, 0.04099001735448837, 0.04097403585910797] [0.008109464310109615, 0.15274515748023987, 0.29611915349960327, 0.12467671930789948, 0.020193910226225853, 0.09674064815044403, 0.08162152767181396, 0.03990790247917175, 0.07686871290206909, 0.10301680862903595] [0.001103568123653531, 0.0018934233812615275, 0.027225932106375694, 0.10065190494060516, 0.00462863314896822, 0.680852472782135, 0.1441899836063385, 0.002218537963926792, 0.019375605508685112, 0.01785995252430439] [7.776140478199522e-07, 0.00023751752451062202, 1.265541754946753e-06, 7.943081072880886e-07, 4.357044872449478e-06, 2.038133661841357e-09, 1.3706218382480984e-08, 0.9997550845146179, 3.96911339350936e-08, 7.134497792549155e-08] [0.004980664700269699, 0.9530691504478455, 0.0012971817050129175, 0.0018052858067676425, 0.032232996076345444, 0.00013973248132970184, 0.0015251366421580315, 0.003493305528536439, 9.582896018400788e-05, 0.0013607727596536279] ...
100%|████████████████████████████████████████████████████████████████████████████████| 469/469 [00:07<00:00, 62.15it/s]
Experiment 3, num_epoch=20, lr=1e-05 First 100 true labels: 4 7 8 7 1 1 7 4 3 0 2 8 8 3 1 1 7 0 8 1 5 6 4 4 4 6 3 4 4 3 0 1 7 6 0 1 1 0 5 7 5 1 8 5 5 2 9 6 1 5 2 3 5 3 6 9 2 3 4 1 7 7 3 1 2 2 0 1 1 3 1 5 1 1 9 9 4 8 0 5 1 3 8 2 9 5 6 0 7 8 3 0 6 4 0 3 1 1 0 0 ... First 100 true predictions: 4 1 2 1 1 1 1 1 5 1 2 2 5 1 1 1 7 1 1 1 5 1 1 1 1 1 1 2 1 3 1 1 1 1 7 1 1 1 2 7 6 1 2 5 5 2 3 7 4 3 2 2 7 1 2 3 2 1 4 1 1 7 6 1 5 2 1 1 1 2 1 7 1 1 3 4 3 1 1 5 1 6 4 2 1 1 1 1 7 1 2 1 2 1 4 2 3 1 1 5 ... First 5 prediction Probabilities: [0.03734436258673668, 0.04948404058814049, 0.21611124277114868, 0.10869403183460236, 0.2592947781085968, 0.06012629717588425, 0.07799053937196732, 0.011390982195734978, 0.08965682238340378, 0.08990683406591415] [0.11409001797437668, 0.4766749143600464, 0.0807449221611023, 0.050620533525943756, 0.07312004268169403, 0.01621292717754841, 0.0411507673561573, 0.06620706617832184, 0.047097332775592804, 0.03408142924308777] [0.018800511956214905, 0.010320345871150494, 0.29691198468208313, 0.18822698295116425, 0.054632868617773056, 0.14778466522693634, 0.09250607341527939, 0.01992134563624859, 0.08956916630268097, 0.08132601529359818] [0.06254780292510986, 0.5713654160499573, 0.06322319805622101, 0.04719596356153488, 0.015861205756664276, 0.013016232289373875, 0.04328763857483864, 0.13692724704742432, 0.034063659608364105, 0.012511675246059895] [0.09457781165838242, 0.42980247735977173, 0.10134153068065643, 0.07567155361175537, 0.025417957454919815, 0.017216505482792854, 0.03426406905055046, 0.15485386550426483, 0.04253013804554939, 0.02432398498058319] ...
100%|████████████████████████████████████████████████████████████████████████████████| 469/469 [00:07<00:00, 63.31it/s]
Experiment 4, num_epoch=20, lr=1e-06 First 100 true labels: 4 7 8 7 1 1 7 4 3 0 2 8 8 3 1 1 7 0 8 1 5 6 4 4 4 6 3 4 4 3 0 1 7 6 0 1 1 0 5 7 5 1 8 5 5 2 9 6 1 5 2 3 5 3 6 9 2 3 4 1 7 7 3 1 2 2 0 1 1 3 1 5 1 1 9 9 4 8 0 5 1 3 8 2 9 5 6 0 7 8 3 0 6 4 0 3 1 1 0 0 ... First 100 true predictions: 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 ... First 5 prediction Probabilities: [0.06283650547266006, 0.2016061246395111, 0.14883245527744293, 0.11737629026174545, 0.1013929545879364, 0.09233619272708893, 0.07597127556800842, 0.07484865933656693, 0.06537968665361404, 0.05941982939839363] [0.06316563487052917, 0.20025570690631866, 0.14844679832458496, 0.11725891381502151, 0.10155297815799713, 0.09246815741062164, 0.07616836577653885, 0.07512465119361877, 0.06575461477041245, 0.059804175049066544] [0.06319037824869156, 0.19966019690036774, 0.14822980761528015, 0.11717073619365692, 0.10161317139863968, 0.09264856576919556, 0.07626385986804962, 0.07521895319223404, 0.06593448668718338, 0.06006978079676628] [0.05600240081548691, 0.22651784121990204, 0.1589825302362442, 0.11821643263101578, 0.09990055859088898, 0.08916525542736053, 0.07033000141382217, 0.06938424706459045, 0.05854524299502373, 0.052955422550439835] [0.0570356510579586, 0.22363786399364471, 0.15825268626213074, 0.11802932620048523, 0.10065381973981857, 0.08910296112298965, 0.07074697315692902, 0.07010345160961151, 0.05910445749759674, 0.053332824259996414] ...
100%|████████████████████████████████████████████████████████████████████████████████| 469/469 [00:07<00:00, 63.39it/s]
Experiment 5, num_epoch=40, lr=0.001 First 100 true labels: 4 7 8 7 1 1 7 4 3 0 2 8 8 3 1 1 7 0 8 1 5 6 4 4 4 6 3 4 4 3 0 1 7 6 0 1 1 0 5 7 5 1 8 5 5 2 9 6 1 5 2 3 5 3 6 9 2 3 4 1 7 7 3 1 2 2 0 1 1 3 1 5 1 1 9 9 4 8 0 5 1 3 8 2 9 5 6 0 7 8 3 0 6 4 0 3 1 1 0 0 ... First 100 true predictions: 4 1 8 7 1 7 7 4 3 2 2 8 1 3 6 1 7 0 8 1 5 6 4 4 4 6 3 4 4 9 0 1 7 6 0 7 1 0 5 3 6 1 4 5 5 2 9 6 1 5 2 2 5 3 6 9 2 3 4 1 7 1 3 1 2 2 0 1 1 1 1 5 2 1 9 2 9 8 0 5 7 3 2 9 1 5 6 0 7 8 3 0 2 4 0 3 1 1 0 0 ... First 5 prediction Probabilities: [0.04449288547039032, 0.07780998200178146, 0.06755146384239197, 0.1529538780450821, 0.3235415518283844, 0.07038334757089615, 0.09183072298765182, 0.040107257664203644, 0.08839812874794006, 0.0429307222366333] [0.016988875344395638, 0.48411113023757935, 0.033633965998888016, 0.19107411801815033, 0.10796966403722763, 0.003598700975999236, 0.0012069017393514514, 0.1463702917098999, 0.002807996701449156, 0.01223838236182928] [0.0015220834175124764, 3.07643786072731e-05, 0.0006338836392387748, 0.001289050211198628, 1.1900595353608878e-07, 0.0003679135988932103, 0.053039468824863434, 3.0722421229256724e-07, 0.9423934817314148, 0.000722900265827775] [0.005543236155062914, 0.0379282645881176, 0.014228818006813526, 0.00858987681567669, 0.07698293030261993, 0.005204638000577688, 0.018007274717092514, 0.7500907182693481, 0.07028057426214218, 0.013143601827323437] [0.0029470503795892, 0.7723882794380188, 0.0017544578295201063, 0.007224154658615589, 0.0503050871193409, 0.00037459106533788145, 0.0022835498675704002, 0.15914276242256165, 0.0024723540991544724, 0.0011077923700213432] ...
100%|████████████████████████████████████████████████████████████████████████████████| 469/469 [00:07<00:00, 64.12it/s]
Experiment 6, num_epoch=40, lr=0.0001 First 100 true labels: 4 7 8 7 1 1 7 4 3 0 2 8 8 3 1 1 7 0 8 1 5 6 4 4 4 6 3 4 4 3 0 1 7 6 0 1 1 0 5 7 5 1 8 5 5 2 9 6 1 5 2 3 5 3 6 9 2 3 4 1 7 7 3 1 2 2 0 1 1 3 1 5 1 1 9 9 4 8 0 5 1 3 8 2 9 5 6 0 7 8 3 0 6 4 0 3 1 1 0 0 ... First 100 true predictions: 1 3 8 7 7 1 7 1 3 0 5 8 8 3 1 4 7 1 8 1 5 5 1 4 4 6 3 4 4 3 0 1 7 6 0 1 1 0 5 7 5 1 8 2 5 2 3 6 1 4 0 3 5 3 2 7 2 3 4 1 7 7 3 7 5 2 0 1 1 3 3 5 1 5 6 5 1 2 0 5 1 3 4 2 0 5 6 1 7 8 3 4 6 4 1 5 7 1 0 6 ... First 5 prediction Probabilities: [0.001532549038529396, 0.5403372049331665, 0.18466295301914215, 0.0252687931060791, 0.17075251042842865, 0.01856696419417858, 0.0013696191599592566, 0.0513155534863472, 0.0009633851004764438, 0.005230504088103771] [0.0005886601284146309, 0.008624495007097721, 0.0055307974107563496, 0.8883817791938782, 0.0038658585399389267, 0.049001436680555344, 0.005005527753382921, 0.0009947187500074506, 0.030277488753199577, 0.007729259319603443] [7.565062674075307e-07, 5.863848073772715e-08, 1.6275619145744713e-06, 3.114942228421569e-05, 1.1273754552121318e-07, 5.679984951711958e-06, 0.00015183209325186908, 6.812635433561809e-08, 0.9994888305664062, 0.00031986687099561095] [2.1376543372753076e-05, 5.384137693908997e-05, 5.784081054116541e-07, 2.6914460704574594e-07, 0.0027108362410217524, 1.1711234204625498e-09, 1.4999753261690785e-07, 0.9972129464149475, 9.698260861057406e-09, 1.6951966941292085e-08] [4.869095573667437e-05, 0.043014172464609146, 0.0004937132471241057, 0.00012731853348668665, 6.1005779571132734e-05, 9.11996467038989e-05, 1.9417137082200497e-05, 0.9561179876327515, 7.821669896657113e-06, 1.8690881915972568e-05] ...
100%|████████████████████████████████████████████████████████████████████████████████| 469/469 [00:07<00:00, 62.81it/s]
Experiment 7, num_epoch=40, lr=1e-05 First 100 true labels: 4 7 8 7 1 1 7 4 3 0 2 8 8 3 1 1 7 0 8 1 5 6 4 4 4 6 3 4 4 3 0 1 7 6 0 1 1 0 5 7 5 1 8 5 5 2 9 6 1 5 2 3 5 3 6 9 2 3 4 1 7 7 3 1 2 2 0 1 1 3 1 5 1 1 9 9 4 8 0 5 1 3 8 2 9 5 6 0 7 8 3 0 6 4 0 3 1 1 0 0 ... First 100 true predictions: 3 1 3 7 1 4 1 7 3 1 5 2 5 7 1 1 3 0 4 1 3 1 1 1 3 1 2 4 1 6 2 1 7 3 1 1 1 1 5 7 1 1 6 3 5 2 3 3 3 3 1 3 5 2 4 5 2 4 1 0 1 7 3 3 5 5 3 1 1 5 1 3 2 1 2 2 4 1 7 5 1 5 6 1 2 7 1 0 1 1 4 1 1 4 1 1 1 1 1 2 ... First 5 prediction Probabilities: [0.01965557225048542, 0.1597338616847992, 0.08925321698188782, 0.2211056798696518, 0.047062113881111145, 0.10267326235771179, 0.16552039980888367, 0.041400209069252014, 0.11202101409435272, 0.041574716567993164] [0.041947584599256516, 0.22531454265117645, 0.17432531714439392, 0.12542864680290222, 0.05469188839197159, 0.06576590985059738, 0.03885629400610924, 0.11832401156425476, 0.07215164601802826, 0.08319412916898727] [0.006537625566124916, 0.06173282116651535, 0.10396739095449448, 0.2644512355327606, 0.09229924529790878, 0.13502782583236694, 0.10697690397500992, 0.021616633981466293, 0.1422061026096344, 0.06518415361642838] [0.0760091170668602, 0.11181364953517914, 0.1747245490550995, 0.11489276587963104, 0.010438697412610054, 0.018139712512493134, 0.03440474346280098, 0.40534067153930664, 0.037143610417842865, 0.017092492431402206] [0.06184305250644684, 0.3153344690799713, 0.10143514722585678, 0.11861491203308105, 0.042504988610744476, 0.02643566019833088, 0.05313699319958687, 0.18364273011684418, 0.0692821517586708, 0.027769941836595535] ...
100%|████████████████████████████████████████████████████████████████████████████████| 469/469 [00:07<00:00, 63.75it/s]
Experiment 8, num_epoch=40, lr=1e-06 First 100 true labels: 4 7 8 7 1 1 7 4 3 0 2 8 8 3 1 1 7 0 8 1 5 6 4 4 4 6 3 4 4 3 0 1 7 6 0 1 1 0 5 7 5 1 8 5 5 2 9 6 1 5 2 3 5 3 6 9 2 3 4 1 7 7 3 1 2 2 0 1 1 3 1 5 1 1 9 9 4 8 0 5 1 3 8 2 9 5 6 0 7 8 3 0 6 4 0 3 1 1 0 0 ... First 100 true predictions: 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 ... First 5 prediction Probabilities: [0.06072084605693817, 0.20953546464443207, 0.1532699018716812, 0.11686752736568451, 0.10204272717237473, 0.09095597267150879, 0.07428176701068878, 0.07208140939474106, 0.06355855613946915, 0.05668583884835243] [0.060542721301317215, 0.20986443758010864, 0.15357233583927155, 0.1170819103717804, 0.10221734642982483, 0.0906110480427742, 0.07397196441888809, 0.07206741720438004, 0.06351042538881302, 0.05656042695045471] [0.06205868348479271, 0.20502284169197083, 0.15127024054527283, 0.11702579259872437, 0.10210031270980835, 0.09131041914224625, 0.07502157241106033, 0.07330628484487534, 0.06475944072008133, 0.05812434107065201] [0.05740886554121971, 0.22093859314918518, 0.15763479471206665, 0.11731761693954468, 0.10209186375141144, 0.08879104256629944, 0.07192893326282501, 0.06962739676237106, 0.06103253737092018, 0.05322834476828575] [0.058514971286058426, 0.21729683876037598, 0.15605995059013367, 0.11728127300739288, 0.10211609303951263, 0.08941806852817535, 0.07255896180868149, 0.07046522200107574, 0.061941999942064285, 0.05434659123420715] ...
100%|████████████████████████████████████████████████████████████████████████████████| 469/469 [00:07<00:00, 62.71it/s]
Experiment 9, num_epoch=60, lr=0.001 First 100 true labels: 4 7 8 7 1 1 7 4 3 0 2 8 8 3 1 1 7 0 8 1 5 6 4 4 4 6 3 4 4 3 0 1 7 6 0 1 1 0 5 7 5 1 8 5 5 2 9 6 1 5 2 3 5 3 6 9 2 3 4 1 7 7 3 1 2 2 0 1 1 3 1 5 1 1 9 9 4 8 0 5 1 3 8 2 9 5 6 0 7 8 3 0 6 4 0 3 1 1 0 0 ... First 100 true predictions: 4 1 8 7 1 1 7 4 5 5 2 8 8 3 1 1 7 0 8 1 5 6 4 4 4 6 8 1 4 3 0 1 7 5 0 1 1 0 5 4 5 1 8 5 2 2 9 9 5 6 2 5 7 3 1 9 2 3 4 1 7 7 3 1 2 2 0 1 1 3 1 5 1 1 9 2 4 8 1 5 1 3 3 2 0 4 6 0 7 9 3 0 0 0 1 2 1 1 0 0 ... First 5 prediction Probabilities: [0.01110322866588831, 0.011040260083973408, 0.0500018410384655, 0.058350514620542526, 0.7764249444007874, 0.013032950460910797, 0.04363211989402771, 0.01812438853085041, 0.01306074671447277, 0.005228961352258921] [0.006129888817667961, 0.6909778714179993, 0.00999495666474104, 0.01321113295853138, 0.05929763615131378, 0.011746141128242016, 0.005355240777134895, 0.17955265939235687, 0.00285714166238904, 0.020877454429864883] [4.8621659516356885e-05, 0.0038535434287041426, 0.0028695317450910807, 0.00018823184655047953, 0.0004698041011579335, 2.535598468966782e-05, 0.00024283974198624492, 6.665018736384809e-05, 0.9902505874633789, 0.0019847743678838015] [7.613619601443133e-08, 0.0023769750259816647, 3.191944486502507e-08, 1.1447378938100883e-06, 9.242885425919667e-06, 1.369239743809203e-08, 4.6559943456259134e-08, 0.9976122379302979, 1.7965218646054382e-08, 1.3722458902520884e-07] [0.0270396638661623, 0.68876713514328, 0.00023592748038936406, 0.00014866460696794093, 0.0005102533032186329, 0.00015723690739832819, 0.0006998041062615812, 0.2811683714389801, 0.0010804617777466774, 0.00019244980649091303] ...
100%|████████████████████████████████████████████████████████████████████████████████| 469/469 [00:07<00:00, 65.82it/s]
Experiment 10, num_epoch=60, lr=0.0001 First 100 true labels: 4 7 8 7 1 1 7 4 3 0 2 8 8 3 1 1 7 0 8 1 5 6 4 4 4 6 3 4 4 3 0 1 7 6 0 1 1 0 5 7 5 1 8 5 5 2 9 6 1 5 2 3 5 3 6 9 2 3 4 1 7 7 3 1 2 2 0 1 1 3 1 5 1 1 9 9 4 8 0 5 1 3 8 2 9 5 6 0 7 8 3 0 6 4 0 3 1 1 0 0 ... First 100 true predictions: 4 1 8 7 7 7 1 1 1 0 2 9 8 4 1 1 7 0 8 1 4 6 1 4 4 6 3 7 4 3 0 1 7 6 0 1 1 0 5 7 5 1 8 5 5 2 9 6 1 5 4 3 5 2 1 9 2 3 3 7 7 7 3 1 2 3 0 1 1 3 1 5 1 1 9 2 4 8 0 5 3 3 8 2 1 3 6 0 7 8 3 1 0 4 0 1 1 1 0 3 ... First 5 prediction Probabilities: [3.675207608466735e-06, 0.0035449115093797445, 6.856524123577401e-05, 0.0001023114746203646, 0.9957118034362793, 0.0002846618590410799, 7.77649474912323e-05, 5.8612906286725774e-05, 0.00013689312618225813, 1.0761585144791752e-05] [0.004330751486122608, 0.2718481421470642, 0.1694304198026657, 0.1734028160572052, 0.07786080241203308, 0.012183411978185177, 0.018709613010287285, 0.23312927782535553, 0.02711649425327778, 0.011988251470029354] [1.5396359458463849e-06, 8.183119462046307e-06, 0.0001036926987580955, 0.000686376413796097, 7.544350637544994e-07, 2.4426557502010837e-05, 0.00013863196363672614, 1.4056826330488548e-05, 0.99899822473526, 2.418608528387267e-05] [1.117316605814267e-05, 6.17470359429717e-05, 1.8547750642028404e-06, 4.488400008995086e-06, 0.0001398902095388621, 1.5890261551021467e-08, 1.3224467743100377e-08, 0.9997808337211609, 1.0073990530656829e-08, 2.709373170262097e-08] [0.006750059314072132, 0.4626878798007965, 0.00034450870589353144, 0.0004936308832839131, 0.0015320501988753676, 0.0002941825077868998, 0.0011232630349695683, 0.5251645445823669, 0.0002552976075094193, 0.0013545732945203781] ...
100%|████████████████████████████████████████████████████████████████████████████████| 469/469 [00:07<00:00, 62.91it/s]
Experiment 11, num_epoch=60, lr=1e-05 First 100 true labels: 4 7 8 7 1 1 7 4 3 0 2 8 8 3 1 1 7 0 8 1 5 6 4 4 4 6 3 4 4 3 0 1 7 6 0 1 1 0 5 7 5 1 8 5 5 2 9 6 1 5 2 3 5 3 6 9 2 3 4 1 7 7 3 1 2 2 0 1 1 3 1 5 1 1 9 9 4 8 0 5 1 3 8 2 9 5 6 0 7 8 3 0 6 4 0 3 1 1 0 0 ... First 100 true predictions: 4 1 8 2 1 1 1 1 5 1 2 0 5 3 1 7 1 1 5 1 5 9 1 1 2 6 7 7 1 5 4 1 7 5 0 1 1 0 1 1 5 1 1 6 5 2 2 6 1 6 9 2 5 5 4 2 1 4 2 1 7 1 3 1 2 2 4 1 1 3 1 2 1 2 2 4 2 7 1 2 1 2 8 1 2 1 6 0 2 9 4 0 1 4 1 2 1 1 1 1 ... First 5 prediction Probabilities: [0.10492344200611115, 0.21673673391342163, 0.05545021593570709, 0.05862429738044739, 0.3586787283420563, 0.013912678696215153, 0.05327589437365532, 0.023654282093048096, 0.062041811645030975, 0.05270195007324219] [0.04891233891248703, 0.4555073082447052, 0.014295345172286034, 0.07905856519937515, 0.07706701010465622, 0.05993843078613281, 0.0996357724070549, 0.009250203147530556, 0.1056181862950325, 0.050716742873191833] [0.025805609300732613, 0.04559033364057541, 0.04089934378862381, 0.12409646064043045, 0.022133719176054, 0.20019790530204773, 0.1623273491859436, 0.006891149096190929, 0.28510767221450806, 0.0869503989815712] [0.003739203792065382, 0.008932691998779774, 0.3882262110710144, 0.11054537445306778, 0.01790551096200943, 0.16807477176189423, 0.030586156994104385, 0.21135137975215912, 0.02888326160609722, 0.03175545856356621] [0.2353205680847168, 0.497346967458725, 0.015209327451884747, 0.019664358347654343, 0.017861654981970787, 0.005356844048947096, 0.034411314874887466, 0.1255398392677307, 0.04081172123551369, 0.008477488532662392] ...
100%|████████████████████████████████████████████████████████████████████████████████| 469/469 [00:07<00:00, 64.36it/s]
Experiment 12, num_epoch=60, lr=1e-06 First 100 true labels: 4 7 8 7 1 1 7 4 3 0 2 8 8 3 1 1 7 0 8 1 5 6 4 4 4 6 3 4 4 3 0 1 7 6 0 1 1 0 5 7 5 1 8 5 5 2 9 6 1 5 2 3 5 3 6 9 2 3 4 1 7 7 3 1 2 2 0 1 1 3 1 5 1 1 9 9 4 8 0 5 1 3 8 2 9 5 6 0 7 8 3 0 6 4 0 3 1 1 0 0 ... First 100 true predictions: 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 ... First 5 prediction Probabilities: [0.06305043399333954, 0.2047557681798935, 0.1499205380678177, 0.11704449355602264, 0.10165883600711823, 0.09224885702133179, 0.07504280656576157, 0.07393572479486465, 0.06378984451293945, 0.05855269730091095] [0.06225183233618736, 0.20593437552452087, 0.15090732276439667, 0.1171664223074913, 0.1012086272239685, 0.09199722111225128, 0.07509682327508926, 0.07353604584932327, 0.06356797367334366, 0.058333367109298706] [0.06235850602388382, 0.20558534562587738, 0.15049733221530914, 0.11697548627853394, 0.1018671989440918, 0.0919962003827095, 0.07502511888742447, 0.07366649806499481, 0.06362379342317581, 0.05840453878045082] [0.05891532078385353, 0.2166324257850647, 0.1582135558128357, 0.11843275278806686, 0.10122033953666687, 0.08844458311796188, 0.07246098667383194, 0.07120373845100403, 0.060070138424634933, 0.05440615117549896] [0.05926714465022087, 0.21735140681266785, 0.1570311039686203, 0.11845418810844421, 0.10104519873857498, 0.08816824853420258, 0.07255929708480835, 0.07137015461921692, 0.06025932729244232, 0.054493941366672516] ...
100%|████████████████████████████████████████████████████████████████████████████████| 469/469 [00:07<00:00, 64.04it/s]
Experiment 13, num_epoch=80, lr=0.001 First 100 true labels: 4 7 8 7 1 1 7 4 3 0 2 8 8 3 1 1 7 0 8 1 5 6 4 4 4 6 3 4 4 3 0 1 7 6 0 1 1 0 5 7 5 1 8 5 5 2 9 6 1 5 2 3 5 3 6 9 2 3 4 1 7 7 3 1 2 2 0 1 1 3 1 5 1 1 9 9 4 8 0 5 1 3 8 2 9 5 6 0 7 8 3 0 6 4 0 3 1 1 0 0 ... First 100 true predictions: 4 1 8 4 7 1 1 3 3 0 2 8 8 8 1 7 7 0 8 1 5 6 4 4 4 6 8 4 4 3 0 2 7 5 0 1 1 0 5 7 5 1 9 5 5 2 5 6 1 5 2 3 5 3 2 9 4 3 1 1 7 7 3 1 2 2 0 1 1 1 1 4 1 1 9 6 3 8 0 5 1 3 8 2 9 5 6 0 7 8 3 0 6 1 0 2 1 1 4 6 ... First 5 prediction Probabilities: [4.2811741707282636e-08, 0.0014604568714275956, 0.0003174546000082046, 6.733465852448717e-06, 0.9980792999267578, 1.1659928986773593e-06, 2.900183062592987e-07, 0.00013193768972996622, 2.5229412585758837e-06, 4.620364535412591e-08] [0.004833441227674484, 0.9070056676864624, 0.0045705558732151985, 0.014240668155252934, 0.016047043725848198, 0.007685227785259485, 0.0010719356359913945, 0.031732190400362015, 0.0007954119355417788, 0.012017969973385334] [1.7872615387659607e-07, 2.080280836480597e-07, 4.918425474897958e-06, 6.080935577301716e-07, 6.462031798548651e-09, 9.681323298593725e-09, 5.222059940024337e-07, 2.0457088467651374e-08, 0.9999929666519165, 5.777532692263776e-07] [0.07725397497415543, 0.2778821587562561, 0.00040970719419419765, 0.00013450579717755318, 0.2794397473335266, 0.00622183782979846, 0.20327003300189972, 0.14899136126041412, 0.004328761249780655, 0.002067893510684371] [3.5200580896344036e-05, 0.22997312247753143, 7.461879647507885e-08, 2.054014913710489e-07, 9.760633474797942e-06, 4.374795636863382e-09, 6.430564081227885e-09, 0.7699815034866333, 4.3619228229374585e-09, 1.4830946781785315e-07] ...
100%|████████████████████████████████████████████████████████████████████████████████| 469/469 [00:07<00:00, 64.73it/s]
Experiment 14, num_epoch=80, lr=0.0001 First 100 true labels: 4 7 8 7 1 1 7 4 3 0 2 8 8 3 1 1 7 0 8 1 5 6 4 4 4 6 3 4 4 3 0 1 7 6 0 1 1 0 5 7 5 1 8 5 5 2 9 6 1 5 2 3 5 3 6 9 2 3 4 1 7 7 3 1 2 2 0 1 1 3 1 5 1 1 9 9 4 8 0 5 1 3 8 2 9 5 6 0 7 8 3 0 6 4 0 3 1 1 0 0 ... First 100 true predictions: 2 7 8 7 1 7 7 1 3 0 2 8 8 3 1 1 7 0 2 1 5 6 4 1 2 6 3 4 1 3 0 1 7 6 0 1 1 0 5 7 5 1 8 5 5 2 9 6 1 6 2 4 1 5 6 9 2 3 4 1 1 7 2 1 1 2 0 1 1 3 1 5 1 1 9 3 4 8 0 5 1 5 8 2 0 5 6 1 7 8 1 1 6 4 0 3 1 1 8 0 ... First 5 prediction Probabilities: [0.0033344768453389406, 0.21035990118980408, 0.2670755684375763, 0.21632708609104156, 0.07613460719585419, 0.07159946858882904, 0.01836422085762024, 0.09494058042764664, 0.012140706181526184, 0.029723359271883965] [0.00010961257794406265, 0.00920670386403799, 0.0006336001679301262, 0.0003305379068478942, 0.00034665263956412673, 5.128418251842959e-06, 9.606747880752664e-06, 0.9892610311508179, 1.993737168959342e-05, 7.719507266301662e-05] [0.0006571479607373476, 3.127018089799094e-06, 0.001373921986669302, 0.004409148823469877, 6.732309998369601e-07, 0.0008213139371946454, 0.02096237801015377, 2.3651911760680377e-05, 0.969757080078125, 0.0019916498567909002] [9.838058900868418e-08, 0.00016312806110363454, 9.958166629076004e-06, 8.174722552212188e-07, 4.8897320084506646e-05, 1.196013954540831e-09, 2.7653422307594155e-07, 0.9997767806053162, 4.382004803460404e-08, 3.7618352877188954e-08] [0.044544003903865814, 0.5297346711158752, 0.10035025328397751, 0.04207940027117729, 0.10412753373384476, 0.05451769754290581, 0.05059325695037842, 0.05834677442908287, 0.006768957246094942, 0.008937465026974678] ...
100%|████████████████████████████████████████████████████████████████████████████████| 469/469 [00:07<00:00, 64.22it/s]
Experiment 15, num_epoch=80, lr=1e-05 First 100 true labels: 4 7 8 7 1 1 7 4 3 0 2 8 8 3 1 1 7 0 8 1 5 6 4 4 4 6 3 4 4 3 0 1 7 6 0 1 1 0 5 7 5 1 8 5 5 2 9 6 1 5 2 3 5 3 6 9 2 3 4 1 7 7 3 1 2 2 0 1 1 3 1 5 1 1 9 9 4 8 0 5 1 3 8 2 9 5 6 0 7 8 3 0 6 4 0 3 1 1 0 0 ... First 100 true predictions: 4 1 2 7 0 1 0 5 3 1 2 6 7 2 1 1 7 1 2 1 5 3 4 4 2 6 4 4 0 3 0 1 5 6 0 1 1 9 1 7 5 1 2 5 5 3 3 6 3 6 1 3 2 4 3 3 2 9 3 5 7 7 3 1 2 3 4 1 6 2 1 3 3 1 3 1 2 2 2 5 1 1 1 2 1 2 6 1 1 1 3 0 1 4 0 1 1 1 1 1 ... First 5 prediction Probabilities: [0.028166281059384346, 0.12037631869316101, 0.2697962522506714, 0.12797240912914276, 0.30145424604415894, 0.023276979103684425, 0.02566259540617466, 0.018914224579930305, 0.04402017220854759, 0.04036048799753189] [0.07697953283786774, 0.5618888139724731, 0.05880311504006386, 0.04711073264479637, 0.05962830409407616, 0.0129085723310709, 0.021895403042435646, 0.0970136970281601, 0.036528147757053375, 0.027243707329034805] [0.012885204516351223, 0.04265232011675835, 0.42107030749320984, 0.25464844703674316, 0.02548087015748024, 0.07027953863143921, 0.05515681952238083, 0.05270921438932419, 0.044554565101861954, 0.020562661811709404] [0.33920130133628845, 0.02773224376142025, 0.049264032393693924, 0.0025823882315307856, 0.0006178451585583389, 0.0001690085482550785, 0.00209756544791162, 0.5711352825164795, 0.004307817667722702, 0.002892513992264867] [0.23514112830162048, 0.21633970737457275, 0.03192823752760887, 0.017747698351740837, 0.06614739447832108, 0.00844479352235794, 0.21146275103092194, 0.02684696950018406, 0.1470993310213089, 0.03884194791316986] ...
100%|████████████████████████████████████████████████████████████████████████████████| 469/469 [00:07<00:00, 64.00it/s]
Experiment 16, num_epoch=80, lr=1e-06 First 100 true labels: 4 7 8 7 1 1 7 4 3 0 2 8 8 3 1 1 7 0 8 1 5 6 4 4 4 6 3 4 4 3 0 1 7 6 0 1 1 0 5 7 5 1 8 5 5 2 9 6 1 5 2 3 5 3 6 9 2 3 4 1 7 7 3 1 2 2 0 1 1 3 1 5 1 1 9 9 4 8 0 5 1 3 8 2 9 5 6 0 7 8 3 0 6 4 0 3 1 1 0 0 ... First 100 true predictions: 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 ... First 5 prediction Probabilities: [0.0677715539932251, 0.18221937119960785, 0.1419350653886795, 0.11495447158813477, 0.10321110486984253, 0.09382335096597672, 0.08047499507665634, 0.08057653903961182, 0.07015734165906906, 0.06487623602151871] [0.06713038682937622, 0.18493396043777466, 0.14239032566547394, 0.11526937037706375, 0.10281574726104736, 0.09374697506427765, 0.0802222341299057, 0.0798962265253067, 0.06945298612117767, 0.06414174288511276] [0.06908639520406723, 0.1780174821615219, 0.14009469747543335, 0.11453182250261307, 0.10275789350271225, 0.09493038058280945, 0.08165793120861053, 0.08138762414455414, 0.07139488309621811, 0.06614084541797638] [0.0581962950527668, 0.21098357439041138, 0.15565793216228485, 0.11807886511087418, 0.1027902364730835, 0.08893561363220215, 0.07442919909954071, 0.07448465377092361, 0.061706021428108215, 0.054737623780965805] [0.05887938663363457, 0.21067874133586884, 0.15344293415546417, 0.11770772188901901, 0.1036686822772026, 0.08846046030521393, 0.07475779950618744, 0.07486001402139664, 0.06220671534538269, 0.05533759295940399] ...
3.2.5 Confusion Matrix¶
# Confusion matrix per experiment, titled by its (epoch_num, lr) settings.
plot_cm(experiment_results, ['epoch_num', 'lr'])
3.2.6 Precision-Recall Curve¶
# Precision-recall curves per experiment, labeled by (epoch_num, lr).
plot_pr(experiment_results, ['epoch_num', 'lr'])
3.2.7 ROC AUC Curve¶
# ROC/AUC curves: first macro/micro averages only, then every class curve.
plot_roc_auc(experiment_results, ['epoch_num','lr'], "macro_micro")
plot_roc_auc(experiment_results, ['epoch_num','lr'], "all")